merge with crew-stable
Thomas Arendsen Hein
r5921:549a7ebe merge default
@@ -1,291 +1,312 @@
1 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
1 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
2
2
3 import os, locale, re, socket
3 import os, locale, re, socket
4 from cStringIO import StringIO
4 from cStringIO import StringIO
5 from mercurial import util
5 from mercurial import util
6
6
7 from common import NoRepo, commit, converter_source, checktool
7 from common import NoRepo, commit, converter_source, checktool
8
8
9 class convert_cvs(converter_source):
9 class convert_cvs(converter_source):
10 def __init__(self, ui, path, rev=None):
10 def __init__(self, ui, path, rev=None):
11 super(convert_cvs, self).__init__(ui, path, rev=rev)
11 super(convert_cvs, self).__init__(ui, path, rev=rev)
12
12
13 cvs = os.path.join(path, "CVS")
13 cvs = os.path.join(path, "CVS")
14 if not os.path.exists(cvs):
14 if not os.path.exists(cvs):
15 raise NoRepo("%s does not look like a CVS checkout" % path)
15 raise NoRepo("%s does not look like a CVS checkout" % path)
16
16
17 for tool in ('cvsps', 'cvs'):
17 for tool in ('cvsps', 'cvs'):
18 checktool(tool)
18 checktool(tool)
19
19
20 self.changeset = {}
20 self.changeset = {}
21 self.files = {}
21 self.files = {}
22 self.tags = {}
22 self.tags = {}
23 self.lastbranch = {}
23 self.lastbranch = {}
24 self.parent = {}
24 self.parent = {}
25 self.socket = None
25 self.socket = None
26 self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
26 self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
27 self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
27 self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
28 self.encoding = locale.getpreferredencoding()
28 self.encoding = locale.getpreferredencoding()
29 self._parse()
29 self._parse()
30 self._connect()
30 self._connect()
31
31
32 def _parse(self):
32 def _parse(self):
33 if self.changeset:
33 if self.changeset:
34 return
34 return
35
35
36 maxrev = 0
36 maxrev = 0
37 cmd = 'cvsps -A -u --cvs-direct -q'
37 cmd = 'cvsps -A -u --cvs-direct -q'
38 if self.rev:
38 if self.rev:
39 # TODO: handle tags
39 # TODO: handle tags
40 try:
40 try:
41 # patchset number?
41 # patchset number?
42 maxrev = int(self.rev)
42 maxrev = int(self.rev)
43 except ValueError:
43 except ValueError:
44 try:
44 try:
45 # date
45 # date
46 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
46 util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
47 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
47 cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
48 except util.Abort:
48 except util.Abort:
49 raise util.Abort('revision %s is not a patchset number or date' % self.rev)
49 raise util.Abort('revision %s is not a patchset number or date' % self.rev)
50
50
51 d = os.getcwd()
51 d = os.getcwd()
52 try:
52 try:
53 os.chdir(self.path)
53 os.chdir(self.path)
54 id = None
54 id = None
55 state = 0
55 state = 0
56 filerevids = {}
56 for l in util.popen(cmd):
57 for l in util.popen(cmd):
57 if state == 0: # header
58 if state == 0: # header
58 if l.startswith("PatchSet"):
59 if l.startswith("PatchSet"):
59 id = l[9:-2]
60 id = l[9:-2]
60 if maxrev and int(id) > maxrev:
61 if maxrev and int(id) > maxrev:
62 # ignore everything
61 state = 3
63 state = 3
62 elif l.startswith("Date"):
64 elif l.startswith("Date"):
63 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
65 date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
64 date = util.datestr(date)
66 date = util.datestr(date)
65 elif l.startswith("Branch"):
67 elif l.startswith("Branch"):
66 branch = l[8:-1]
68 branch = l[8:-1]
67 self.parent[id] = self.lastbranch.get(branch, 'bad')
69 self.parent[id] = self.lastbranch.get(branch, 'bad')
68 self.lastbranch[branch] = id
70 self.lastbranch[branch] = id
69 elif l.startswith("Ancestor branch"):
71 elif l.startswith("Ancestor branch"):
70 ancestor = l[17:-1]
72 ancestor = l[17:-1]
71 self.parent[id] = self.lastbranch[ancestor]
73 # figure out the parent later
74 self.parent[id] = None
72 elif l.startswith("Author"):
75 elif l.startswith("Author"):
73 author = self.recode(l[8:-1])
76 author = self.recode(l[8:-1])
74 elif l.startswith("Tag:") or l.startswith("Tags:"):
77 elif l.startswith("Tag:") or l.startswith("Tags:"):
75 t = l[l.index(':')+1:]
78 t = l[l.index(':')+1:]
76 t = [ut.strip() for ut in t.split(',')]
79 t = [ut.strip() for ut in t.split(',')]
77 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
80 if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
78 self.tags.update(dict.fromkeys(t, id))
81 self.tags.update(dict.fromkeys(t, id))
79 elif l.startswith("Log:"):
82 elif l.startswith("Log:"):
83 # switch to gathering log
80 state = 1
84 state = 1
81 log = ""
85 log = ""
82 elif state == 1: # log
86 elif state == 1: # log
83 if l == "Members: \n":
87 if l == "Members: \n":
88 # switch to gathering members
84 files = {}
89 files = {}
90 oldrevs = []
85 log = self.recode(log[:-1])
91 log = self.recode(log[:-1])
86 state = 2
92 state = 2
87 else:
93 else:
94 # gather log
88 log += l
95 log += l
89 elif state == 2:
96 elif state == 2: # members
90 if l == "\n": #
97 if l == "\n": # start of next entry
91 state = 0
98 state = 0
92 p = [self.parent[id]]
99 p = [self.parent[id]]
93 if id == "1":
100 if id == "1":
94 p = []
101 p = []
95 if branch == "HEAD":
102 if branch == "HEAD":
96 branch = ""
103 branch = ""
104 if branch and p[0] == None:
105 latest = None
106 # the last changeset that contains a base
107 # file is our parent
108 for r in oldrevs:
109 latest = max(filerevids[r], latest)
110 p = [latest]
111
112 # add current commit to set
97 c = commit(author=author, date=date, parents=p,
113 c = commit(author=author, date=date, parents=p,
98 desc=log, branch=branch)
114 desc=log, branch=branch)
99 self.changeset[id] = c
115 self.changeset[id] = c
100 self.files[id] = files
116 self.files[id] = files
101 else:
117 else:
102 colon = l.rfind(':')
118 colon = l.rfind(':')
103 file = l[1:colon]
119 file = l[1:colon]
104 rev = l[colon+1:-2]
120 rev = l[colon+1:-2]
105 rev = rev.split("->")[1]
121 oldrev, rev = rev.split("->")
106 files[file] = rev
122 files[file] = rev
123
124 # save some information for identifying branch points
125 oldrevs.append("%s:%s" % (oldrev, file))
126 filerevids["%s:%s" % (rev, file)] = id
107 elif state == 3:
127 elif state == 3:
128 # swallow all input
108 continue
129 continue
109
130
110 self.heads = self.lastbranch.values()
131 self.heads = self.lastbranch.values()
111 finally:
132 finally:
112 os.chdir(d)
133 os.chdir(d)
113
134
114 def _connect(self):
135 def _connect(self):
115 root = self.cvsroot
136 root = self.cvsroot
116 conntype = None
137 conntype = None
117 user, host = None, None
138 user, host = None, None
118 cmd = ['cvs', 'server']
139 cmd = ['cvs', 'server']
119
140
120 self.ui.status("connecting to %s\n" % root)
141 self.ui.status("connecting to %s\n" % root)
121
142
122 if root.startswith(":pserver:"):
143 if root.startswith(":pserver:"):
123 root = root[9:]
144 root = root[9:]
124 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
145 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
125 root)
146 root)
126 if m:
147 if m:
127 conntype = "pserver"
148 conntype = "pserver"
128 user, passw, serv, port, root = m.groups()
149 user, passw, serv, port, root = m.groups()
129 if not user:
150 if not user:
130 user = "anonymous"
151 user = "anonymous"
131 if not port:
152 if not port:
132 port = 2401
153 port = 2401
133 else:
154 else:
134 port = int(port)
155 port = int(port)
135 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
156 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
136 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
157 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
137
158
138 if not passw:
159 if not passw:
139 passw = "A"
160 passw = "A"
140 pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
161 pf = open(os.path.join(os.environ["HOME"], ".cvspass"))
141 for line in pf.read().splitlines():
162 for line in pf.read().splitlines():
142 part1, part2 = line.split(' ', 1)
163 part1, part2 = line.split(' ', 1)
143 if part1 == '/1':
164 if part1 == '/1':
144 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
165 # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
145 part1, part2 = part2.split(' ', 1)
166 part1, part2 = part2.split(' ', 1)
146 format = format1
167 format = format1
147 else:
168 else:
148 # :pserver:user@example.com:/cvsroot/foo Ah<Z
169 # :pserver:user@example.com:/cvsroot/foo Ah<Z
149 format = format0
170 format = format0
150 if part1 == format:
171 if part1 == format:
151 passw = part2
172 passw = part2
152 break
173 break
153 pf.close()
174 pf.close()
154
175
155 sck = socket.socket()
176 sck = socket.socket()
156 sck.connect((serv, port))
177 sck.connect((serv, port))
157 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
178 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
158 "END AUTH REQUEST", ""]))
179 "END AUTH REQUEST", ""]))
159 if sck.recv(128) != "I LOVE YOU\n":
180 if sck.recv(128) != "I LOVE YOU\n":
160 raise util.Abort("CVS pserver authentication failed")
181 raise util.Abort("CVS pserver authentication failed")
161
182
162 self.writep = self.readp = sck.makefile('r+')
183 self.writep = self.readp = sck.makefile('r+')
163
184
164 if not conntype and root.startswith(":local:"):
185 if not conntype and root.startswith(":local:"):
165 conntype = "local"
186 conntype = "local"
166 root = root[7:]
187 root = root[7:]
167
188
168 if not conntype:
189 if not conntype:
169 # :ext:user@host/home/user/path/to/cvsroot
190 # :ext:user@host/home/user/path/to/cvsroot
170 if root.startswith(":ext:"):
191 if root.startswith(":ext:"):
171 root = root[5:]
192 root = root[5:]
172 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
193 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
173 # Do not take Windows path "c:\foo\bar" for a connection string
194 # Do not take Windows path "c:\foo\bar" for a connection string
174 if os.path.isdir(root) or not m:
195 if os.path.isdir(root) or not m:
175 conntype = "local"
196 conntype = "local"
176 else:
197 else:
177 conntype = "rsh"
198 conntype = "rsh"
178 user, host, root = m.group(1), m.group(2), m.group(3)
199 user, host, root = m.group(1), m.group(2), m.group(3)
179
200
180 if conntype != "pserver":
201 if conntype != "pserver":
181 if conntype == "rsh":
202 if conntype == "rsh":
182 rsh = os.environ.get("CVS_RSH") or "ssh"
203 rsh = os.environ.get("CVS_RSH") or "ssh"
183 if user:
204 if user:
184 cmd = [rsh, '-l', user, host] + cmd
205 cmd = [rsh, '-l', user, host] + cmd
185 else:
206 else:
186 cmd = [rsh, host] + cmd
207 cmd = [rsh, host] + cmd
187
208
188 # popen2 does not support argument lists under Windows
209 # popen2 does not support argument lists under Windows
189 cmd = [util.shellquote(arg) for arg in cmd]
210 cmd = [util.shellquote(arg) for arg in cmd]
190 cmd = util.quotecommand(' '.join(cmd))
211 cmd = util.quotecommand(' '.join(cmd))
191 self.writep, self.readp = os.popen2(cmd, 'b')
212 self.writep, self.readp = os.popen2(cmd, 'b')
192
213
193 self.realroot = root
214 self.realroot = root
194
215
195 self.writep.write("Root %s\n" % root)
216 self.writep.write("Root %s\n" % root)
196 self.writep.write("Valid-responses ok error Valid-requests Mode"
217 self.writep.write("Valid-responses ok error Valid-requests Mode"
197 " M Mbinary E Checked-in Created Updated"
218 " M Mbinary E Checked-in Created Updated"
198 " Merged Removed\n")
219 " Merged Removed\n")
199 self.writep.write("valid-requests\n")
220 self.writep.write("valid-requests\n")
200 self.writep.flush()
221 self.writep.flush()
201 r = self.readp.readline()
222 r = self.readp.readline()
202 if not r.startswith("Valid-requests"):
223 if not r.startswith("Valid-requests"):
203 raise util.Abort("server sucks")
224 raise util.Abort("server sucks")
204 if "UseUnchanged" in r:
225 if "UseUnchanged" in r:
205 self.writep.write("UseUnchanged\n")
226 self.writep.write("UseUnchanged\n")
206 self.writep.flush()
227 self.writep.flush()
207 r = self.readp.readline()
228 r = self.readp.readline()
208
229
209 def getheads(self):
230 def getheads(self):
210 return self.heads
231 return self.heads
211
232
212 def _getfile(self, name, rev):
233 def _getfile(self, name, rev):
213
234
214 def chunkedread(fp, count):
235 def chunkedread(fp, count):
215 # file-objects returned by socket.makefile() do not handle
236 # file-objects returned by socket.makefile() do not handle
216 # large read() requests very well.
237 # large read() requests very well.
217 chunksize = 65536
238 chunksize = 65536
218 output = StringIO()
239 output = StringIO()
219 while count > 0:
240 while count > 0:
220 data = fp.read(min(count, chunksize))
241 data = fp.read(min(count, chunksize))
221 if not data:
242 if not data:
222 raise util.Abort("%d bytes missing from remote file" % count)
243 raise util.Abort("%d bytes missing from remote file" % count)
223 count -= len(data)
244 count -= len(data)
224 output.write(data)
245 output.write(data)
225 return output.getvalue()
246 return output.getvalue()
226
247
227 if rev.endswith("(DEAD)"):
248 if rev.endswith("(DEAD)"):
228 raise IOError
249 raise IOError
229
250
230 args = ("-N -P -kk -r %s --" % rev).split()
251 args = ("-N -P -kk -r %s --" % rev).split()
231 args.append(self.cvsrepo + '/' + name)
252 args.append(self.cvsrepo + '/' + name)
232 for x in args:
253 for x in args:
233 self.writep.write("Argument %s\n" % x)
254 self.writep.write("Argument %s\n" % x)
234 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
255 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
235 self.writep.flush()
256 self.writep.flush()
236
257
237 data = ""
258 data = ""
238 while 1:
259 while 1:
239 line = self.readp.readline()
260 line = self.readp.readline()
240 if line.startswith("Created ") or line.startswith("Updated "):
261 if line.startswith("Created ") or line.startswith("Updated "):
241 self.readp.readline() # path
262 self.readp.readline() # path
242 self.readp.readline() # entries
263 self.readp.readline() # entries
243 mode = self.readp.readline()[:-1]
264 mode = self.readp.readline()[:-1]
244 count = int(self.readp.readline()[:-1])
265 count = int(self.readp.readline()[:-1])
245 data = chunkedread(self.readp, count)
266 data = chunkedread(self.readp, count)
246 elif line.startswith(" "):
267 elif line.startswith(" "):
247 data += line[1:]
268 data += line[1:]
248 elif line.startswith("M "):
269 elif line.startswith("M "):
249 pass
270 pass
250 elif line.startswith("Mbinary "):
271 elif line.startswith("Mbinary "):
251 count = int(self.readp.readline()[:-1])
272 count = int(self.readp.readline()[:-1])
252 data = chunkedread(self.readp, count)
273 data = chunkedread(self.readp, count)
253 else:
274 else:
254 if line == "ok\n":
275 if line == "ok\n":
255 return (data, "x" in mode and "x" or "")
276 return (data, "x" in mode and "x" or "")
256 elif line.startswith("E "):
277 elif line.startswith("E "):
257 self.ui.warn("cvs server: %s\n" % line[2:])
278 self.ui.warn("cvs server: %s\n" % line[2:])
258 elif line.startswith("Remove"):
279 elif line.startswith("Remove"):
259 l = self.readp.readline()
280 l = self.readp.readline()
260 l = self.readp.readline()
281 l = self.readp.readline()
261 if l != "ok\n":
282 if l != "ok\n":
262 raise util.Abort("unknown CVS response: %s" % l)
283 raise util.Abort("unknown CVS response: %s" % l)
263 else:
284 else:
264 raise util.Abort("unknown CVS response: %s" % line)
285 raise util.Abort("unknown CVS response: %s" % line)
265
286
266 def getfile(self, file, rev):
287 def getfile(self, file, rev):
267 data, mode = self._getfile(file, rev)
288 data, mode = self._getfile(file, rev)
268 self.modecache[(file, rev)] = mode
289 self.modecache[(file, rev)] = mode
269 return data
290 return data
270
291
271 def getmode(self, file, rev):
292 def getmode(self, file, rev):
272 return self.modecache[(file, rev)]
293 return self.modecache[(file, rev)]
273
294
274 def getchanges(self, rev):
295 def getchanges(self, rev):
275 self.modecache = {}
296 self.modecache = {}
276 files = self.files[rev]
297 files = self.files[rev]
277 cl = files.items()
298 cl = files.items()
278 cl.sort()
299 cl.sort()
279 return (cl, {})
300 return (cl, {})
280
301
281 def getcommit(self, rev):
302 def getcommit(self, rev):
282 return self.changeset[rev]
303 return self.changeset[rev]
283
304
284 def gettags(self):
305 def gettags(self):
285 return self.tags
306 return self.tags
286
307
287 def getchangedfiles(self, rev, i):
308 def getchangedfiles(self, rev, i):
288 files = self.files[rev].keys()
309 files = self.files[rev].keys()
289 files.sort()
310 files.sort()
290 return files
311 return files
291
312
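The hunk above reworks how the CVS source resolves the parent of a branch's first changeset: rather than trusting the "Ancestor branch" line from cvsps directly, it now records each member's base revision ("oldrev:file") and, once the patchset is complete, picks as parent the newest changeset that produced one of those base revisions. A minimal standalone sketch of that idea, using simplified data structures and hypothetical names rather than the extension's actual classes:

def resolve_branch_parents(patchsets):
    # patchsets: iterable of (id, members) in cvsps order, where members is a
    # list of (file, oldrev, newrev) tuples taken from the "Members:" section
    filerevids = {}   # "rev:file" -> changeset id that created that revision
    parents = {}      # changeset id -> resolved parent id (None for roots)
    for psid, members in patchsets:
        latest = None
        for f, oldrev, newrev in members:
            # the changeset that last touched a base revision is our parent
            prev = filerevids.get("%s:%s" % (oldrev, f))
            if prev is not None and (latest is None or prev > latest):
                latest = prev
            filerevids["%s:%s" % (newrev, f)] = psid
        parents[psid] = latest
    return parents

# Changeset 3 branches off files last committed in 1 and 2, so it resolves to 2.
print(resolve_branch_parents([
    (1, [("a.c", "1.0", "1.1")]),
    (2, [("b.c", "1.0", "1.1")]),
    (3, [("a.c", "1.1", "1.1.2.1"), ("b.c", "1.1", "1.1.2.1")]),
]))
# -> {1: None, 2: None, 3: 2}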
@@ -1,284 +1,288 @@
1 # hg backend for convert extension
1 # hg backend for convert extension
2
2
3 # Notes for hg->hg conversion:
3 # Notes for hg->hg conversion:
4 #
4 #
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 # of commit messages, but new versions do. Changesets created by
6 # of commit messages, but new versions do. Changesets created by
7 # those older versions, then converted, may thus have different
7 # those older versions, then converted, may thus have different
8 # hashes for changesets that are otherwise identical.
8 # hashes for changesets that are otherwise identical.
9 #
9 #
10 # * By default, the source revision is stored in the converted
10 # * By default, the source revision is stored in the converted
11 # revision. This will cause the converted revision to have a
11 # revision. This will cause the converted revision to have a
12 # different identity than the source. To avoid this, use the
12 # different identity than the source. To avoid this, use the
13 # following option: "--config convert.hg.saverev=false"
13 # following option: "--config convert.hg.saverev=false"
14
14
15
15
16 import os, time
16 import os, time
17 from mercurial.i18n import _
17 from mercurial.i18n import _
18 from mercurial.node import *
18 from mercurial.node import *
19 from mercurial import hg, lock, revlog, util
19 from mercurial import hg, lock, revlog, util
20
20
21 from common import NoRepo, commit, converter_source, converter_sink
21 from common import NoRepo, commit, converter_source, converter_sink
22
22
23 class mercurial_sink(converter_sink):
23 class mercurial_sink(converter_sink):
24 def __init__(self, ui, path):
24 def __init__(self, ui, path):
25 converter_sink.__init__(self, ui, path)
25 converter_sink.__init__(self, ui, path)
26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
26 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
27 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
28 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
29 self.lastbranch = None
29 self.lastbranch = None
30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
30 if os.path.isdir(path) and len(os.listdir(path)) > 0:
31 try:
31 try:
32 self.repo = hg.repository(self.ui, path)
32 self.repo = hg.repository(self.ui, path)
33 if not self.repo.local():
34 raise NoRepo(_('%s is not a local Mercurial repo') % path)
33 except hg.RepoError, err:
35 except hg.RepoError, err:
34 ui.print_exc()
36 ui.print_exc()
35 raise NoRepo(err.args[0])
37 raise NoRepo(err.args[0])
36 else:
38 else:
37 try:
39 try:
38 ui.status(_('initializing destination %s repository\n') % path)
40 ui.status(_('initializing destination %s repository\n') % path)
39 self.repo = hg.repository(self.ui, path, create=True)
41 self.repo = hg.repository(self.ui, path, create=True)
42 if not self.repo.local():
43 raise NoRepo(_('%s is not a local Mercurial repo') % path)
40 self.created.append(path)
44 self.created.append(path)
41 except hg.RepoError, err:
45 except hg.RepoError, err:
42 ui.print_exc()
46 ui.print_exc()
43 raise NoRepo("could not create hg repo %s as sink" % path)
47 raise NoRepo("could not create hg repo %s as sink" % path)
44 self.lock = None
48 self.lock = None
45 self.wlock = None
49 self.wlock = None
46 self.filemapmode = False
50 self.filemapmode = False
47
51
48 def before(self):
52 def before(self):
49 self.ui.debug(_('run hg sink pre-conversion action\n'))
53 self.ui.debug(_('run hg sink pre-conversion action\n'))
50 self.wlock = self.repo.wlock()
54 self.wlock = self.repo.wlock()
51 self.lock = self.repo.lock()
55 self.lock = self.repo.lock()
52 self.repo.dirstate.clear()
56 self.repo.dirstate.clear()
53
57
54 def after(self):
58 def after(self):
55 self.ui.debug(_('run hg sink post-conversion action\n'))
59 self.ui.debug(_('run hg sink post-conversion action\n'))
56 self.repo.dirstate.invalidate()
60 self.repo.dirstate.invalidate()
57 self.lock = None
61 self.lock = None
58 self.wlock = None
62 self.wlock = None
59
63
60 def revmapfile(self):
64 def revmapfile(self):
61 return os.path.join(self.path, ".hg", "shamap")
65 return os.path.join(self.path, ".hg", "shamap")
62
66
63 def authorfile(self):
67 def authorfile(self):
64 return os.path.join(self.path, ".hg", "authormap")
68 return os.path.join(self.path, ".hg", "authormap")
65
69
66 def getheads(self):
70 def getheads(self):
67 h = self.repo.changelog.heads()
71 h = self.repo.changelog.heads()
68 return [ hex(x) for x in h ]
72 return [ hex(x) for x in h ]
69
73
70 def putfile(self, f, e, data):
74 def putfile(self, f, e, data):
71 self.repo.wwrite(f, data, e)
75 self.repo.wwrite(f, data, e)
72 if f not in self.repo.dirstate:
76 if f not in self.repo.dirstate:
73 self.repo.dirstate.normallookup(f)
77 self.repo.dirstate.normallookup(f)
74
78
75 def copyfile(self, source, dest):
79 def copyfile(self, source, dest):
76 self.repo.copy(source, dest)
80 self.repo.copy(source, dest)
77
81
78 def delfile(self, f):
82 def delfile(self, f):
79 try:
83 try:
80 util.unlink(self.repo.wjoin(f))
84 util.unlink(self.repo.wjoin(f))
81 #self.repo.remove([f])
85 #self.repo.remove([f])
82 except OSError:
86 except OSError:
83 pass
87 pass
84
88
85 def setbranch(self, branch, pbranch, parents):
89 def setbranch(self, branch, pbranch, parents):
86 if (not self.clonebranches) or (branch == self.lastbranch):
90 if (not self.clonebranches) or (branch == self.lastbranch):
87 return
91 return
88
92
89 self.lastbranch = branch
93 self.lastbranch = branch
90 self.after()
94 self.after()
91 if not branch:
95 if not branch:
92 branch = 'default'
96 branch = 'default'
93 if not pbranch:
97 if not pbranch:
94 pbranch = 'default'
98 pbranch = 'default'
95
99
96 branchpath = os.path.join(self.path, branch)
100 branchpath = os.path.join(self.path, branch)
97 try:
101 try:
98 self.repo = hg.repository(self.ui, branchpath)
102 self.repo = hg.repository(self.ui, branchpath)
99 except:
103 except:
100 if not parents:
104 if not parents:
101 self.repo = hg.repository(self.ui, branchpath, create=True)
105 self.repo = hg.repository(self.ui, branchpath, create=True)
102 else:
106 else:
103 self.ui.note(_('cloning branch %s to %s\n') % (pbranch, branch))
107 self.ui.note(_('cloning branch %s to %s\n') % (pbranch, branch))
104 hg.clone(self.ui, os.path.join(self.path, pbranch),
108 hg.clone(self.ui, os.path.join(self.path, pbranch),
105 branchpath, rev=parents, update=False,
109 branchpath, rev=parents, update=False,
106 stream=True)
110 stream=True)
107 self.repo = hg.repository(self.ui, branchpath)
111 self.repo = hg.repository(self.ui, branchpath)
108 self.before()
112 self.before()
109
113
110 def putcommit(self, files, parents, commit):
114 def putcommit(self, files, parents, commit):
111 seen = {}
115 seen = {}
112 pl = []
116 pl = []
113 for p in parents:
117 for p in parents:
114 if p not in seen:
118 if p not in seen:
115 pl.append(p)
119 pl.append(p)
116 seen[p] = 1
120 seen[p] = 1
117 parents = pl
121 parents = pl
118 nparents = len(parents)
122 nparents = len(parents)
119 if self.filemapmode and nparents == 1:
123 if self.filemapmode and nparents == 1:
120 m1node = self.repo.changelog.read(bin(parents[0]))[0]
124 m1node = self.repo.changelog.read(bin(parents[0]))[0]
121 parent = parents[0]
125 parent = parents[0]
122
126
123 if len(parents) < 2: parents.append("0" * 40)
127 if len(parents) < 2: parents.append("0" * 40)
124 if len(parents) < 2: parents.append("0" * 40)
128 if len(parents) < 2: parents.append("0" * 40)
125 p2 = parents.pop(0)
129 p2 = parents.pop(0)
126
130
127 text = commit.desc
131 text = commit.desc
128 extra = commit.extra.copy()
132 extra = commit.extra.copy()
129 if self.branchnames and commit.branch:
133 if self.branchnames and commit.branch:
130 extra['branch'] = commit.branch
134 extra['branch'] = commit.branch
131 if commit.rev:
135 if commit.rev:
132 extra['convert_revision'] = commit.rev
136 extra['convert_revision'] = commit.rev
133
137
134 while parents:
138 while parents:
135 p1 = p2
139 p1 = p2
136 p2 = parents.pop(0)
140 p2 = parents.pop(0)
137 a = self.repo.rawcommit(files, text, commit.author, commit.date,
141 a = self.repo.rawcommit(files, text, commit.author, commit.date,
138 bin(p1), bin(p2), extra=extra)
142 bin(p1), bin(p2), extra=extra)
139 self.repo.dirstate.clear()
143 self.repo.dirstate.clear()
140 text = "(octopus merge fixup)\n"
144 text = "(octopus merge fixup)\n"
141 p2 = hg.hex(self.repo.changelog.tip())
145 p2 = hg.hex(self.repo.changelog.tip())
142
146
143 if self.filemapmode and nparents == 1:
147 if self.filemapmode and nparents == 1:
144 man = self.repo.manifest
148 man = self.repo.manifest
145 mnode = self.repo.changelog.read(bin(p2))[0]
149 mnode = self.repo.changelog.read(bin(p2))[0]
146 if not man.cmp(m1node, man.revision(mnode)):
150 if not man.cmp(m1node, man.revision(mnode)):
147 self.repo.rollback()
151 self.repo.rollback()
148 self.repo.dirstate.clear()
152 self.repo.dirstate.clear()
149 return parent
153 return parent
150 return p2
154 return p2
151
155
152 def puttags(self, tags):
156 def puttags(self, tags):
153 try:
157 try:
154 old = self.repo.wfile(".hgtags").read()
158 old = self.repo.wfile(".hgtags").read()
155 oldlines = old.splitlines(1)
159 oldlines = old.splitlines(1)
156 oldlines.sort()
160 oldlines.sort()
157 except:
161 except:
158 oldlines = []
162 oldlines = []
159
163
160 k = tags.keys()
164 k = tags.keys()
161 k.sort()
165 k.sort()
162 newlines = []
166 newlines = []
163 for tag in k:
167 for tag in k:
164 newlines.append("%s %s\n" % (tags[tag], tag))
168 newlines.append("%s %s\n" % (tags[tag], tag))
165
169
166 newlines.sort()
170 newlines.sort()
167
171
168 if newlines != oldlines:
172 if newlines != oldlines:
169 self.ui.status("updating tags\n")
173 self.ui.status("updating tags\n")
170 f = self.repo.wfile(".hgtags", "w")
174 f = self.repo.wfile(".hgtags", "w")
171 f.write("".join(newlines))
175 f.write("".join(newlines))
172 f.close()
176 f.close()
173 if not oldlines: self.repo.add([".hgtags"])
177 if not oldlines: self.repo.add([".hgtags"])
174 date = "%s 0" % int(time.mktime(time.gmtime()))
178 date = "%s 0" % int(time.mktime(time.gmtime()))
175 extra = {}
179 extra = {}
176 if self.tagsbranch != 'default':
180 if self.tagsbranch != 'default':
177 extra['branch'] = self.tagsbranch
181 extra['branch'] = self.tagsbranch
178 try:
182 try:
179 tagparent = self.repo.changectx(self.tagsbranch).node()
183 tagparent = self.repo.changectx(self.tagsbranch).node()
180 except hg.RepoError, inst:
184 except hg.RepoError, inst:
181 tagparent = nullid
185 tagparent = nullid
182 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
186 self.repo.rawcommit([".hgtags"], "update tags", "convert-repo",
183 date, tagparent, nullid, extra=extra)
187 date, tagparent, nullid, extra=extra)
184 return hex(self.repo.changelog.tip())
188 return hex(self.repo.changelog.tip())
185
189
186 def setfilemapmode(self, active):
190 def setfilemapmode(self, active):
187 self.filemapmode = active
191 self.filemapmode = active
188
192
189 class mercurial_source(converter_source):
193 class mercurial_source(converter_source):
190 def __init__(self, ui, path, rev=None):
194 def __init__(self, ui, path, rev=None):
191 converter_source.__init__(self, ui, path, rev)
195 converter_source.__init__(self, ui, path, rev)
192 self.saverev = ui.configbool('convert', 'hg.saverev', True)
196 self.saverev = ui.configbool('convert', 'hg.saverev', True)
193 try:
197 try:
194 self.repo = hg.repository(self.ui, path)
198 self.repo = hg.repository(self.ui, path)
195 # try to provoke an exception if this isn't really a hg
199 # try to provoke an exception if this isn't really a hg
196 # repo, but some other bogus compatible-looking url
200 # repo, but some other bogus compatible-looking url
197 if not self.repo.local():
201 if not self.repo.local():
198 raise hg.RepoError()
202 raise hg.RepoError()
199 except hg.RepoError:
203 except hg.RepoError:
200 ui.print_exc()
204 ui.print_exc()
201 raise NoRepo("%s is not a local Mercurial repo" % path)
205 raise NoRepo("%s is not a local Mercurial repo" % path)
202 self.lastrev = None
206 self.lastrev = None
203 self.lastctx = None
207 self.lastctx = None
204 self._changescache = None
208 self._changescache = None
205 self.convertfp = None
209 self.convertfp = None
206
210
207 def changectx(self, rev):
211 def changectx(self, rev):
208 if self.lastrev != rev:
212 if self.lastrev != rev:
209 self.lastctx = self.repo.changectx(rev)
213 self.lastctx = self.repo.changectx(rev)
210 self.lastrev = rev
214 self.lastrev = rev
211 return self.lastctx
215 return self.lastctx
212
216
213 def getheads(self):
217 def getheads(self):
214 if self.rev:
218 if self.rev:
215 return [hex(self.repo.changectx(self.rev).node())]
219 return [hex(self.repo.changectx(self.rev).node())]
216 else:
220 else:
217 return [hex(node) for node in self.repo.heads()]
221 return [hex(node) for node in self.repo.heads()]
218
222
219 def getfile(self, name, rev):
223 def getfile(self, name, rev):
220 try:
224 try:
221 return self.changectx(rev).filectx(name).data()
225 return self.changectx(rev).filectx(name).data()
222 except revlog.LookupError, err:
226 except revlog.LookupError, err:
223 raise IOError(err)
227 raise IOError(err)
224
228
225 def getmode(self, name, rev):
229 def getmode(self, name, rev):
226 m = self.changectx(rev).manifest()
230 m = self.changectx(rev).manifest()
227 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
231 return (m.execf(name) and 'x' or '') + (m.linkf(name) and 'l' or '')
228
232
229 def getchanges(self, rev):
233 def getchanges(self, rev):
230 ctx = self.changectx(rev)
234 ctx = self.changectx(rev)
231 if self._changescache and self._changescache[0] == rev:
235 if self._changescache and self._changescache[0] == rev:
232 m, a, r = self._changescache[1]
236 m, a, r = self._changescache[1]
233 else:
237 else:
234 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
238 m, a, r = self.repo.status(ctx.parents()[0].node(), ctx.node())[:3]
235 changes = [(name, rev) for name in m + a + r]
239 changes = [(name, rev) for name in m + a + r]
236 changes.sort()
240 changes.sort()
237 return (changes, self.getcopies(ctx, m + a))
241 return (changes, self.getcopies(ctx, m + a))
238
242
239 def getcopies(self, ctx, files):
243 def getcopies(self, ctx, files):
240 copies = {}
244 copies = {}
241 for name in files:
245 for name in files:
242 try:
246 try:
243 copies[name] = ctx.filectx(name).renamed()[0]
247 copies[name] = ctx.filectx(name).renamed()[0]
244 except TypeError:
248 except TypeError:
245 pass
249 pass
246 return copies
250 return copies
247
251
248 def getcommit(self, rev):
252 def getcommit(self, rev):
249 ctx = self.changectx(rev)
253 ctx = self.changectx(rev)
250 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
254 parents = [hex(p.node()) for p in ctx.parents() if p.node() != nullid]
251 if self.saverev:
255 if self.saverev:
252 crev = rev
256 crev = rev
253 else:
257 else:
254 crev = None
258 crev = None
255 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
259 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
256 desc=ctx.description(), rev=crev, parents=parents,
260 desc=ctx.description(), rev=crev, parents=parents,
257 branch=ctx.branch(), extra=ctx.extra())
261 branch=ctx.branch(), extra=ctx.extra())
258
262
259 def gettags(self):
263 def gettags(self):
260 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
264 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
261 return dict([(name, hex(node)) for name, node in tags])
265 return dict([(name, hex(node)) for name, node in tags])
262
266
263 def getchangedfiles(self, rev, i):
267 def getchangedfiles(self, rev, i):
264 ctx = self.changectx(rev)
268 ctx = self.changectx(rev)
265 i = i or 0
269 i = i or 0
266 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
270 changes = self.repo.status(ctx.parents()[i].node(), ctx.node())[:3]
267
271
268 if i == 0:
272 if i == 0:
269 self._changescache = (rev, changes)
273 self._changescache = (rev, changes)
270
274
271 return changes[0] + changes[1] + changes[2]
275 return changes[0] + changes[1] + changes[2]
272
276
273 def converted(self, rev, destrev):
277 def converted(self, rev, destrev):
274 if self.convertfp is None:
278 if self.convertfp is None:
275 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
279 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
276 'a')
280 'a')
277 self.convertfp.write('%s %s\n' % (destrev, rev))
281 self.convertfp.write('%s %s\n' % (destrev, rev))
278 self.convertfp.flush()
282 self.convertfp.flush()
279
283
280 def before(self):
284 def before(self):
281 self.ui.debug(_('run hg source pre-conversion action\n'))
285 self.ui.debug(_('run hg source pre-conversion action\n'))
282
286
283 def after(self):
287 def after(self):
284 self.ui.debug(_('run hg source post-conversion action\n'))
288 self.ui.debug(_('run hg source post-conversion action\n'))
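The sink-side hunk above adds the same sanity check the Mercurial source already performs: after opening or creating the destination repository it raises NoRepo unless the repository is local, so bundle repos and remote URLs cannot be used as a conversion target. A condensed sketch of that guard, assuming the hg API of this era and the NoRepo exception from the extension's common module shown above; it is not the sink's exact control flow:

import os
from mercurial import hg
from common import NoRepo   # convert extension helper, as imported in the file above

def open_local_sink(ui, path):
    if os.path.isdir(path) and os.listdir(path):
        repo = hg.repository(ui, path)              # reuse an existing repository
    else:
        ui.status('initializing destination %s repository\n' % path)
        repo = hg.repository(ui, path, create=True)
    if not repo.local():
        # http://, ssh:// or bundle repositories cannot be written through the
        # localrepo API the sink relies on
        raise NoRepo('%s is not a local Mercurial repo' % path)
    return repo

Checking in both the open and the create branch means a URL that merely looks repository-like fails early with a clear message instead of during the first putcommit.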
@@ -1,984 +1,984 @@
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4 #
4 #
5 # Configuration options:
5 # Configuration options:
6 #
6 #
7 # convert.svn.trunk
7 # convert.svn.trunk
8 # Relative path to the trunk (default: "trunk")
8 # Relative path to the trunk (default: "trunk")
9 # convert.svn.branches
9 # convert.svn.branches
10 # Relative path to tree of branches (default: "branches")
10 # Relative path to tree of branches (default: "branches")
11 # convert.svn.tags
11 # convert.svn.tags
12 # Relative path to tree of tags (default: "tags")
12 # Relative path to tree of tags (default: "tags")
13 #
13 #
14 # Set these in a hgrc, or on the command line as follows:
14 # Set these in a hgrc, or on the command line as follows:
15 #
15 #
16 # hg convert --config convert.svn.trunk=wackoname [...]
16 # hg convert --config convert.svn.trunk=wackoname [...]
17
17
18 import locale
18 import locale
19 import os
19 import os
20 import re
20 import re
21 import sys
21 import sys
22 import cPickle as pickle
22 import cPickle as pickle
23 import tempfile
23 import tempfile
24
24
25 from mercurial import strutil, util
25 from mercurial import strutil, util
26 from mercurial.i18n import _
26 from mercurial.i18n import _
27
27
28 # Subversion stuff. Works best with very recent Python SVN bindings
28 # Subversion stuff. Works best with very recent Python SVN bindings
29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 # these bindings.
30 # these bindings.
31
31
32 from cStringIO import StringIO
32 from cStringIO import StringIO
33
33
34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
35 from common import commandline, converter_sink, mapfile
35 from common import commandline, converter_sink, mapfile
36
36
37 try:
37 try:
38 from svn.core import SubversionException, Pool
38 from svn.core import SubversionException, Pool
39 import svn
39 import svn
40 import svn.client
40 import svn.client
41 import svn.core
41 import svn.core
42 import svn.ra
42 import svn.ra
43 import svn.delta
43 import svn.delta
44 import transport
44 import transport
45 except ImportError:
45 except ImportError:
46 pass
46 pass
47
47
48 def geturl(path):
48 def geturl(path):
49 try:
49 try:
50 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
50 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
51 except SubversionException:
51 except SubversionException:
52 pass
52 pass
53 if os.path.isdir(path):
53 if os.path.isdir(path):
54 path = os.path.normpath(os.path.abspath(path))
54 path = os.path.normpath(os.path.abspath(path))
55 if os.name == 'nt':
55 if os.name == 'nt':
56 path = '/' + util.normpath(path)
56 path = '/' + util.normpath(path)
57 return 'file://%s' % path
57 return 'file://%s' % path
58 return path
58 return path
59
59
60 def optrev(number):
60 def optrev(number):
61 optrev = svn.core.svn_opt_revision_t()
61 optrev = svn.core.svn_opt_revision_t()
62 optrev.kind = svn.core.svn_opt_revision_number
62 optrev.kind = svn.core.svn_opt_revision_number
63 optrev.value.number = number
63 optrev.value.number = number
64 return optrev
64 return optrev
65
65
66 class changedpath(object):
66 class changedpath(object):
67 def __init__(self, p):
67 def __init__(self, p):
68 self.copyfrom_path = p.copyfrom_path
68 self.copyfrom_path = p.copyfrom_path
69 self.copyfrom_rev = p.copyfrom_rev
69 self.copyfrom_rev = p.copyfrom_rev
70 self.action = p.action
70 self.action = p.action
71
71
72 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
72 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
73 strict_node_history=False):
73 strict_node_history=False):
74 protocol = -1
74 protocol = -1
75 def receiver(orig_paths, revnum, author, date, message, pool):
75 def receiver(orig_paths, revnum, author, date, message, pool):
76 if orig_paths is not None:
76 if orig_paths is not None:
77 for k, v in orig_paths.iteritems():
77 for k, v in orig_paths.iteritems():
78 orig_paths[k] = changedpath(v)
78 orig_paths[k] = changedpath(v)
79 pickle.dump((orig_paths, revnum, author, date, message),
79 pickle.dump((orig_paths, revnum, author, date, message),
80 fp, protocol)
80 fp, protocol)
81
81
82 try:
82 try:
83 # Use an ra of our own so that our parent can consume
83 # Use an ra of our own so that our parent can consume
84 # our results without confusing the server.
84 # our results without confusing the server.
85 t = transport.SvnRaTransport(url=url)
85 t = transport.SvnRaTransport(url=url)
86 svn.ra.get_log(t.ra, paths, start, end, limit,
86 svn.ra.get_log(t.ra, paths, start, end, limit,
87 discover_changed_paths,
87 discover_changed_paths,
88 strict_node_history,
88 strict_node_history,
89 receiver)
89 receiver)
90 except SubversionException, (inst, num):
90 except SubversionException, (inst, num):
91 pickle.dump(num, fp, protocol)
91 pickle.dump(num, fp, protocol)
92 except IOError:
92 except IOError:
93 # Caller may interrupt the iteration
93 # Caller may interrupt the iteration
94 pickle.dump(None, fp, protocol)
94 pickle.dump(None, fp, protocol)
95 else:
95 else:
96 pickle.dump(None, fp, protocol)
96 pickle.dump(None, fp, protocol)
97 fp.close()
97 fp.close()
98
98
99 def debugsvnlog(ui, **opts):
99 def debugsvnlog(ui, **opts):
100 """Fetch SVN log in a subprocess and channel them back to the parent to
100 """Fetch SVN log in a subprocess and channel them back to the parent to
101 avoid memory collection issues.
101 avoid memory collection issues.
102 """
102 """
103 util.set_binary(sys.stdin)
103 util.set_binary(sys.stdin)
104 util.set_binary(sys.stdout)
104 util.set_binary(sys.stdout)
105 args = decodeargs(sys.stdin.read())
105 args = decodeargs(sys.stdin.read())
106 get_log_child(sys.stdout, *args)
106 get_log_child(sys.stdout, *args)
107
107
108 class logstream:
108 class logstream:
109 """Interruptible revision log iterator."""
109 """Interruptible revision log iterator."""
110 def __init__(self, stdout):
110 def __init__(self, stdout):
111 self._stdout = stdout
111 self._stdout = stdout
112
112
113 def __iter__(self):
113 def __iter__(self):
114 while True:
114 while True:
115 entry = pickle.load(self._stdout)
115 entry = pickle.load(self._stdout)
116 try:
116 try:
117 orig_paths, revnum, author, date, message = entry
117 orig_paths, revnum, author, date, message = entry
118 except:
118 except:
119 if entry is None:
119 if entry is None:
120 break
120 break
121 raise SubversionException("child raised exception", entry)
121 raise SubversionException("child raised exception", entry)
122 yield entry
122 yield entry
123
123
124 def close(self):
124 def close(self):
125 if self._stdout:
125 if self._stdout:
126 self._stdout.close()
126 self._stdout.close()
127 self._stdout = None
127 self._stdout = None
128
128
129 def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
129 def get_log(url, paths, start, end, limit=0, discover_changed_paths=True,
130 strict_node_history=False):
130 strict_node_history=False):
131 args = [url, paths, start, end, limit, discover_changed_paths,
131 args = [url, paths, start, end, limit, discover_changed_paths,
132 strict_node_history]
132 strict_node_history]
133 arg = encodeargs(args)
133 arg = encodeargs(args)
134 hgexe = util.hgexecutable()
134 hgexe = util.hgexecutable()
135 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
135 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
136 stdin, stdout = os.popen2(cmd, 'b')
136 stdin, stdout = os.popen2(cmd, 'b')
137 stdin.write(arg)
137 stdin.write(arg)
138 stdin.close()
138 stdin.close()
139 return logstream(stdout)
139 return logstream(stdout)
140
140
141 # SVN conversion code stolen from bzr-svn and tailor
141 # SVN conversion code stolen from bzr-svn and tailor
142 #
142 #
143 # Subversion looks like a versioned filesystem, branch structures
143 # Subversion looks like a versioned filesystem, branch structures
144 # are defined by conventions and not enforced by the tool. First,
144 # are defined by conventions and not enforced by the tool. First,
145 # we define the potential branches (modules) as "trunk" and "branches"
145 # we define the potential branches (modules) as "trunk" and "branches"
146 # children directories. Revisions are then identified by their
146 # children directories. Revisions are then identified by their
147 # module and revision number (and a repository identifier).
147 # module and revision number (and a repository identifier).
148 #
148 #
149 # The revision graph is really a tree (or a forest). By default, a
149 # The revision graph is really a tree (or a forest). By default, a
150 # revision parent is the previous revision in the same module. If the
150 # revision parent is the previous revision in the same module. If the
151 # module directory is copied/moved from another module then the
151 # module directory is copied/moved from another module then the
152 # revision is the module root and its parent the source revision in
152 # revision is the module root and its parent the source revision in
153 # the parent module. A revision has at most one parent.
153 # the parent module. A revision has at most one parent.
154 #
154 #
155 class svn_source(converter_source):
155 class svn_source(converter_source):
156 def __init__(self, ui, url, rev=None):
156 def __init__(self, ui, url, rev=None):
157 super(svn_source, self).__init__(ui, url, rev=rev)
157 super(svn_source, self).__init__(ui, url, rev=rev)
158
158
159 try:
159 try:
160 SubversionException
160 SubversionException
161 except NameError:
161 except NameError:
162 raise NoRepo('Subversion python bindings could not be loaded')
162 raise NoRepo('Subversion python bindings could not be loaded')
163
163
164 self.encoding = locale.getpreferredencoding()
164 self.encoding = locale.getpreferredencoding()
165 self.lastrevs = {}
165 self.lastrevs = {}
166
166
167 latest = None
167 latest = None
168 try:
168 try:
169 # Support file://path@rev syntax. Useful e.g. to convert
169 # Support file://path@rev syntax. Useful e.g. to convert
170 # deleted branches.
170 # deleted branches.
171 at = url.rfind('@')
171 at = url.rfind('@')
172 if at >= 0:
172 if at >= 0:
173 latest = int(url[at+1:])
173 latest = int(url[at+1:])
174 url = url[:at]
174 url = url[:at]
175 except ValueError, e:
175 except ValueError, e:
176 pass
176 pass
177 self.url = geturl(url)
177 self.url = geturl(url)
178 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
178 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
179 try:
179 try:
180 self.transport = transport.SvnRaTransport(url=self.url)
180 self.transport = transport.SvnRaTransport(url=self.url)
181 self.ra = self.transport.ra
181 self.ra = self.transport.ra
182 self.ctx = self.transport.client
182 self.ctx = self.transport.client
183 self.base = svn.ra.get_repos_root(self.ra)
183 self.base = svn.ra.get_repos_root(self.ra)
184 self.module = self.url[len(self.base):]
184 self.module = self.url[len(self.base):]
185 self.commits = {}
185 self.commits = {}
186 self.paths = {}
186 self.paths = {}
187 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
187 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
188 except SubversionException, e:
188 except SubversionException, e:
189 ui.print_exc()
189 ui.print_exc()
190 raise NoRepo("%s does not look like a Subversion repo" % self.url)
190 raise NoRepo("%s does not look like a Subversion repo" % self.url)
191
191
192 if rev:
192 if rev:
193 try:
193 try:
194 latest = int(rev)
194 latest = int(rev)
195 except ValueError:
195 except ValueError:
196 raise util.Abort('svn: revision %s is not an integer' % rev)
196 raise util.Abort('svn: revision %s is not an integer' % rev)
197
197
198 try:
198 try:
199 self.get_blacklist()
199 self.get_blacklist()
200 except IOError, e:
200 except IOError, e:
201 pass
201 pass
202
202
203 self.last_changed = self.latest(self.module, latest)
203 self.last_changed = self.latest(self.module, latest)
204
204
205 self.head = self.revid(self.last_changed)
205 self.head = self.revid(self.last_changed)
206 self._changescache = None
206 self._changescache = None
207
207
208 if os.path.exists(os.path.join(url, '.svn/entries')):
208 if os.path.exists(os.path.join(url, '.svn/entries')):
209 self.wc = url
209 self.wc = url
210 else:
210 else:
211 self.wc = None
211 self.wc = None
212 self.convertfp = None
212 self.convertfp = None
213
213
214 def setrevmap(self, revmap):
214 def setrevmap(self, revmap):
215 lastrevs = {}
215 lastrevs = {}
216 for revid in revmap.iterkeys():
216 for revid in revmap.iterkeys():
217 uuid, module, revnum = self.revsplit(revid)
217 uuid, module, revnum = self.revsplit(revid)
218 lastrevnum = lastrevs.setdefault(module, revnum)
218 lastrevnum = lastrevs.setdefault(module, revnum)
219 if revnum > lastrevnum:
219 if revnum > lastrevnum:
220 lastrevs[module] = revnum
220 lastrevs[module] = revnum
221 self.lastrevs = lastrevs
221 self.lastrevs = lastrevs
222
222
223 def exists(self, path, optrev):
223 def exists(self, path, optrev):
224 try:
224 try:
225 svn.client.ls(self.url.rstrip('/') + '/' + path,
225 svn.client.ls(self.url.rstrip('/') + '/' + path,
226 optrev, False, self.ctx)
226 optrev, False, self.ctx)
227 return True
227 return True
228 except SubversionException, err:
228 except SubversionException, err:
229 return False
229 return False
230
230
231 def getheads(self):
231 def getheads(self):
232
232
233 def getcfgpath(name, rev):
233 def getcfgpath(name, rev):
234 cfgpath = self.ui.config('convert', 'svn.' + name)
234 cfgpath = self.ui.config('convert', 'svn.' + name)
235 path = (cfgpath or name).strip('/')
235 path = (cfgpath or name).strip('/')
236 if not self.exists(path, rev):
236 if not self.exists(path, rev):
237 if cfgpath:
237 if cfgpath:
238 raise util.Abort(_('expected %s to be at %r, but not found')
238 raise util.Abort(_('expected %s to be at %r, but not found')
239 % (name, path))
239 % (name, path))
240 return None
240 return None
241 self.ui.note(_('found %s at %r\n') % (name, path))
241 self.ui.note(_('found %s at %r\n') % (name, path))
242 return path
242 return path
243
243
244 rev = optrev(self.last_changed)
244 rev = optrev(self.last_changed)
245 oldmodule = ''
245 oldmodule = ''
246 trunk = getcfgpath('trunk', rev)
246 trunk = getcfgpath('trunk', rev)
247 tags = getcfgpath('tags', rev)
247 tags = getcfgpath('tags', rev)
248 branches = getcfgpath('branches', rev)
248 branches = getcfgpath('branches', rev)
249
249
250 # If the project has a trunk or branches, we will extract heads
250 # If the project has a trunk or branches, we will extract heads
251 # from them. We keep the project root otherwise.
251 # from them. We keep the project root otherwise.
252 if trunk:
252 if trunk:
253 oldmodule = self.module or ''
253 oldmodule = self.module or ''
254 self.module += '/' + trunk
254 self.module += '/' + trunk
255 lt = self.latest(self.module, self.last_changed)
255 lt = self.latest(self.module, self.last_changed)
256 self.head = self.revid(lt)
256 self.head = self.revid(lt)
257
257
258 # First head in the list is the module's head
258 # First head in the list is the module's head
259 self.heads = [self.head]
259 self.heads = [self.head]
260 self.tags = '%s/%s' % (oldmodule , (tags or 'tags'))
260 self.tags = '%s/%s' % (oldmodule , (tags or 'tags'))
261
261
262 # Check if branches bring a few more heads to the list
262 # Check if branches bring a few more heads to the list
263 if branches:
263 if branches:
264 rpath = self.url.strip('/')
264 rpath = self.url.strip('/')
265 branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
265 branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
266 self.ctx)
266 self.ctx)
267 for branch in branchnames.keys():
267 for branch in branchnames.keys():
268 module = '%s/%s/%s' % (oldmodule, branches, branch)
268 module = '%s/%s/%s' % (oldmodule, branches, branch)
269 brevnum = self.latest(module, self.last_changed)
269 brevnum = self.latest(module, self.last_changed)
270 brev = self.revid(brevnum, module)
270 brev = self.revid(brevnum, module)
271 self.ui.note('found branch %s at %d\n' % (branch, brevnum))
271 self.ui.note('found branch %s at %d\n' % (branch, brevnum))
272 self.heads.append(brev)
272 self.heads.append(brev)
273
273
274 return self.heads
274 return self.heads
275
275
276 def getfile(self, file, rev):
276 def getfile(self, file, rev):
277 data, mode = self._getfile(file, rev)
277 data, mode = self._getfile(file, rev)
278 self.modecache[(file, rev)] = mode
278 self.modecache[(file, rev)] = mode
279 return data
279 return data
280
280
281 def getmode(self, file, rev):
281 def getmode(self, file, rev):
282 return self.modecache[(file, rev)]
282 return self.modecache[(file, rev)]
283
283
284 def getchanges(self, rev):
284 def getchanges(self, rev):
285 if self._changescache and self._changescache[0] == rev:
285 if self._changescache and self._changescache[0] == rev:
286 return self._changescache[1]
286 return self._changescache[1]
287 self._changescache = None
287 self._changescache = None
288 self.modecache = {}
288 self.modecache = {}
289 (paths, parents) = self.paths[rev]
289 (paths, parents) = self.paths[rev]
290 files, copies = self.expandpaths(rev, paths, parents)
290 files, copies = self.expandpaths(rev, paths, parents)
291 files.sort()
291 files.sort()
292 files = zip(files, [rev] * len(files))
292 files = zip(files, [rev] * len(files))
293
293
294 # caller caches the result, so free it here to release memory
294 # caller caches the result, so free it here to release memory
295 del self.paths[rev]
295 del self.paths[rev]
296 return (files, copies)
296 return (files, copies)
297
297
298 def getchangedfiles(self, rev, i):
298 def getchangedfiles(self, rev, i):
299 changes = self.getchanges(rev)
299 changes = self.getchanges(rev)
300 self._changescache = (rev, changes)
300 self._changescache = (rev, changes)
301 return [f[0] for f in changes[0]]
301 return [f[0] for f in changes[0]]
302
302
303 def getcommit(self, rev):
303 def getcommit(self, rev):
304 if rev not in self.commits:
304 if rev not in self.commits:
305 uuid, module, revnum = self.revsplit(rev)
305 uuid, module, revnum = self.revsplit(rev)
306 self.module = module
306 self.module = module
307 self.reparent(module)
307 self.reparent(module)
308 # We assume that:
308 # We assume that:
309 # - requests for revisions after "stop" come from the
309 # - requests for revisions after "stop" come from the
310 # revision graph backward traversal. Cache all of them
310 # revision graph backward traversal. Cache all of them
311 # down to stop, they will be used eventually.
311 # down to stop, they will be used eventually.
312 # - requests for revisions before "stop" come to get
312 # - requests for revisions before "stop" come to get
313 # isolated branches parents. Just fetch what is needed.
313 # isolated branches parents. Just fetch what is needed.
314 stop = self.lastrevs.get(module, 0)
314 stop = self.lastrevs.get(module, 0)
315 if revnum < stop:
315 if revnum < stop:
316 stop = revnum + 1
316 stop = revnum + 1
317 self._fetch_revisions(revnum, stop)
317 self._fetch_revisions(revnum, stop)
318 commit = self.commits[rev]
318 commit = self.commits[rev]
319 # caller caches the result, so free it here to release memory
319 # caller caches the result, so free it here to release memory
320 del self.commits[rev]
320 del self.commits[rev]
321 return commit
321 return commit
322
322
323 def gettags(self):
323 def gettags(self):
324 tags = {}
324 tags = {}
325 start = self.revnum(self.head)
325 start = self.revnum(self.head)
326 try:
326 try:
327 for entry in get_log(self.url, [self.tags], 0, start):
327 for entry in get_log(self.url, [self.tags], 0, start):
328 orig_paths, revnum, author, date, message = entry
328 orig_paths, revnum, author, date, message = entry
329 for path in orig_paths:
329 for path in orig_paths:
330 if not path.startswith(self.tags+'/'):
330 if not path.startswith(self.tags+'/'):
331 continue
331 continue
332 ent = orig_paths[path]
332 ent = orig_paths[path]
333 source = ent.copyfrom_path
333 source = ent.copyfrom_path
334 rev = ent.copyfrom_rev
334 rev = ent.copyfrom_rev
335 tag = path.split('/')[-1]
335 tag = path.split('/')[-1]
336 tags[tag] = self.revid(rev, module=source)
336 tags[tag] = self.revid(rev, module=source)
337 except SubversionException, (inst, num):
337 except SubversionException, (inst, num):
338 self.ui.note('no tags found at revision %d\n' % start)
338 self.ui.note('no tags found at revision %d\n' % start)
339 return tags
339 return tags
340
340
341 def converted(self, rev, destrev):
341 def converted(self, rev, destrev):
342 if not self.wc:
342 if not self.wc:
343 return
343 return
344 if self.convertfp is None:
344 if self.convertfp is None:
345 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
345 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
346 'a')
346 'a')
347 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
347 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
348 self.convertfp.flush()
348 self.convertfp.flush()
349
349
350 # -- helper functions --
350 # -- helper functions --
351
351
352 def revid(self, revnum, module=None):
352 def revid(self, revnum, module=None):
353 if not module:
353 if not module:
354 module = self.module
354 module = self.module
355 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
355 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
356 revnum)
356 revnum)
357
357
358 def revnum(self, rev):
358 def revnum(self, rev):
359 return int(rev.split('@')[-1])
359 return int(rev.split('@')[-1])
360
360
361 def revsplit(self, rev):
361 def revsplit(self, rev):
362 url, revnum = rev.encode(self.encoding).split('@', 1)
362 url, revnum = rev.encode(self.encoding).split('@', 1)
363 revnum = int(revnum)
363 revnum = int(revnum)
364 parts = url.split('/', 1)
364 parts = url.split('/', 1)
365 uuid = parts.pop(0)[4:]
365 uuid = parts.pop(0)[4:]
366 mod = ''
366 mod = ''
367 if parts:
367 if parts:
368 mod = '/' + parts[0]
368 mod = '/' + parts[0]
369 return uuid, mod, revnum
369 return uuid, mod, revnum
370
370
371 def latest(self, path, stop=0):
371 def latest(self, path, stop=0):
372 'find the latest revision affecting path, up to stop'
372 'find the latest revision affecting path, up to stop'
373 if not stop:
373 if not stop:
374 stop = svn.ra.get_latest_revnum(self.ra)
374 stop = svn.ra.get_latest_revnum(self.ra)
375 try:
375 try:
376 self.reparent('')
376 self.reparent('')
377 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
377 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
378 self.reparent(self.module)
378 self.reparent(self.module)
379 except SubversionException:
379 except SubversionException:
380 dirent = None
380 dirent = None
381 if not dirent:
381 if not dirent:
382 raise util.Abort('%s not found up to revision %d' % (path, stop))
382 raise util.Abort('%s not found up to revision %d' % (path, stop))
383
383
384 return dirent.created_rev
384 return dirent.created_rev
385
385
386 def get_blacklist(self):
386 def get_blacklist(self):
387 """Avoid certain revision numbers.
387 """Avoid certain revision numbers.
388 It is not uncommon for two nearby revisions to cancel each other
388 It is not uncommon for two nearby revisions to cancel each other
389 out, e.g. 'I copied trunk into a subdirectory of itself instead
389 out, e.g. 'I copied trunk into a subdirectory of itself instead
390 of making a branch'. The converted repository is significantly
390 of making a branch'. The converted repository is significantly
391 smaller if we ignore such revisions."""
391 smaller if we ignore such revisions."""
392 self.blacklist = util.set()
392 self.blacklist = util.set()
393 blacklist = self.blacklist
393 blacklist = self.blacklist
394 for line in file("blacklist.txt", "r"):
394 for line in file("blacklist.txt", "r"):
395 if not line.startswith("#"):
395 if not line.startswith("#"):
396 try:
396 try:
397 svn_rev = int(line.strip())
397 svn_rev = int(line.strip())
398 blacklist.add(svn_rev)
398 blacklist.add(svn_rev)
399 except ValueError, e:
399 except ValueError, e:
400 pass # not an integer or a comment
400 pass # not an integer or a comment
401
401
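# Editor's example (file name comes from the code above, contents invented):
# get_blacklist() reads "blacklist.txt" from the current directory, one
# revision number per line; '#' comments and non-integer lines are ignored.
_example_blacklist = """\
# revisions that copy trunk onto itself
6940
6942
"""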
402 def is_blacklisted(self, svn_rev):
402 def is_blacklisted(self, svn_rev):
403 return svn_rev in self.blacklist
403 return svn_rev in self.blacklist
404
404
405 def reparent(self, module):
405 def reparent(self, module):
406 svn_url = self.base + module
406 svn_url = self.base + module
407 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
407 self.ui.debug("reparent to %s\n" % svn_url.encode(self.encoding))
408 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
408 svn.ra.reparent(self.ra, svn_url.encode(self.encoding))
409
409
410 def expandpaths(self, rev, paths, parents):
410 def expandpaths(self, rev, paths, parents):
411 def get_entry_from_path(path, module=self.module):
411 def get_entry_from_path(path, module=self.module):
412 # Given the repository url of this wc, say
412 # Given the repository url of this wc, say
413 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
413 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
414 # extract the "entry" portion (a relative path) from what
414 # extract the "entry" portion (a relative path) from what
415 # svn log --xml says, ie
415 # svn log --xml says, ie
416 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
416 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
417 # that is to say "tests/PloneTestCase.py"
417 # that is to say "tests/PloneTestCase.py"
418 if path.startswith(module):
418 if path.startswith(module):
419 relative = path[len(module):]
419 relative = path[len(module):]
420 if relative.startswith('/'):
420 if relative.startswith('/'):
421 return relative[1:]
421 return relative[1:]
422 else:
422 else:
423 return relative
423 return relative
424
424
425 # The path is outside our tracked tree...
425 # The path is outside our tracked tree...
426 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
426 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
427 return None
427 return None
428
428
429 entries = []
429 entries = []
430 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
430 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
431 copies = {}
431 copies = {}
432
432
433 new_module, revnum = self.revsplit(rev)[1:]
433 new_module, revnum = self.revsplit(rev)[1:]
434 if new_module != self.module:
434 if new_module != self.module:
435 self.module = new_module
435 self.module = new_module
436 self.reparent(self.module)
436 self.reparent(self.module)
437
437
438 for path, ent in paths:
438 for path, ent in paths:
439 entrypath = get_entry_from_path(path, module=self.module)
439 entrypath = get_entry_from_path(path, module=self.module)
440 entry = entrypath.decode(self.encoding)
440 entry = entrypath.decode(self.encoding)
441
441
442 kind = svn.ra.check_path(self.ra, entrypath, revnum)
442 kind = svn.ra.check_path(self.ra, entrypath, revnum)
443 if kind == svn.core.svn_node_file:
443 if kind == svn.core.svn_node_file:
444 if ent.copyfrom_path:
444 if ent.copyfrom_path:
445 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
445 copyfrom_path = get_entry_from_path(ent.copyfrom_path)
446 if copyfrom_path:
446 if copyfrom_path:
447 self.ui.debug("Copied to %s from %s@%s\n" %
447 self.ui.debug("Copied to %s from %s@%s\n" %
448 (entrypath, copyfrom_path,
448 (entrypath, copyfrom_path,
449 ent.copyfrom_rev))
449 ent.copyfrom_rev))
450 # It's probably important for hg that the source
450 # It's probably important for hg that the source
451 # exists in the revision's parent, not just the
451 # exists in the revision's parent, not just the
452 # ent.copyfrom_rev
452 # ent.copyfrom_rev
453 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
453 fromkind = svn.ra.check_path(self.ra, copyfrom_path, ent.copyfrom_rev)
454 if fromkind != 0:
454 if fromkind != 0:
455 copies[self.recode(entry)] = self.recode(copyfrom_path)
455 copies[self.recode(entry)] = self.recode(copyfrom_path)
456 entries.append(self.recode(entry))
456 entries.append(self.recode(entry))
457 elif kind == 0: # gone, but had better be a deleted *file*
457 elif kind == 0: # gone, but had better be a deleted *file*
458 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
458 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
459
459
460 # if a branch is created but entries are removed in the same
460 # if a branch is created but entries are removed in the same
461 # changeset, get the right fromrev
461 # changeset, get the right fromrev
462 # parents cannot be empty here, you cannot remove things from
462 # parents cannot be empty here, you cannot remove things from
463 # a root revision.
463 # a root revision.
464 uuid, old_module, fromrev = self.revsplit(parents[0])
464 uuid, old_module, fromrev = self.revsplit(parents[0])
465
465
466 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
466 basepath = old_module + "/" + get_entry_from_path(path, module=self.module)
467 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
467 entrypath = old_module + "/" + get_entry_from_path(path, module=self.module)
468
468
469 def lookup_parts(p):
469 def lookup_parts(p):
470 rc = None
470 rc = None
471 parts = p.split("/")
471 parts = p.split("/")
472 for i in range(len(parts)):
472 for i in range(len(parts)):
473 part = "/".join(parts[:i])
473 part = "/".join(parts[:i])
474 info = part, copyfrom.get(part, None)
474 info = part, copyfrom.get(part, None)
475 if info[1] is not None:
475 if info[1] is not None:
476 self.ui.debug("Found parent directory %s\n" % info[1])
476 self.ui.debug("Found parent directory %s\n" % info[1])
477 rc = info
477 rc = info
478 return rc
478 return rc
479
479
480 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
480 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
481
481
482 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
482 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
483
483
484 # need to remove fragment from lookup_parts and replace with copyfrom_path
484 # need to remove fragment from lookup_parts and replace with copyfrom_path
485 if frompath is not None:
485 if frompath is not None:
486 self.ui.debug("munge-o-matic\n")
486 self.ui.debug("munge-o-matic\n")
487 self.ui.debug(entrypath + '\n')
487 self.ui.debug(entrypath + '\n')
488 self.ui.debug(entrypath[len(frompath):] + '\n')
488 self.ui.debug(entrypath[len(frompath):] + '\n')
489 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
489 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
490 fromrev = froment.copyfrom_rev
490 fromrev = froment.copyfrom_rev
491 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
491 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
492
492
493 # We can avoid the reparent calls if the module has not changed
493 # We can avoid the reparent calls if the module has not changed
494 # but it is probably not worth the pain.
494 # but it is probably not worth the pain.
495 self.reparent('')
495 self.reparent('')
496 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
496 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
497 self.reparent(self.module)
497 self.reparent(self.module)
498
498
499 if fromkind == svn.core.svn_node_file: # a deleted file
499 if fromkind == svn.core.svn_node_file: # a deleted file
500 entries.append(self.recode(entry))
500 entries.append(self.recode(entry))
501 elif fromkind == svn.core.svn_node_dir:
501 elif fromkind == svn.core.svn_node_dir:
502 # print "Deleted/moved non-file:", revnum, path, ent
502 # print "Deleted/moved non-file:", revnum, path, ent
503 # children = self._find_children(path, revnum - 1)
503 # children = self._find_children(path, revnum - 1)
504 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
504 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
505 # Sometimes this is tricky. For example: in
505 # Sometimes this is tricky. For example: in
506 # the Subversion repository, revision 6940, a dir
506 # the Subversion repository, revision 6940, a dir
507 # was copied and one of its files was deleted
507 # was copied and one of its files was deleted
508 # from the new location in the same commit. This
508 # from the new location in the same commit. This
509 # code can't deal with that yet.
509 # code can't deal with that yet.
510 if ent.action == 'C':
510 if ent.action == 'C':
511 children = self._find_children(path, fromrev)
511 children = self._find_children(path, fromrev)
512 else:
512 else:
513 oroot = entrypath.strip('/')
513 oroot = entrypath.strip('/')
514 nroot = path.strip('/')
514 nroot = path.strip('/')
515 children = self._find_children(oroot, fromrev)
515 children = self._find_children(oroot, fromrev)
516 children = [s.replace(oroot,nroot) for s in children]
516 children = [s.replace(oroot,nroot) for s in children]
517 # Mark all [files, not directories] as deleted.
517 # Mark all [files, not directories] as deleted.
518 for child in children:
518 for child in children:
519 # Can we move a child directory and its
519 # Can we move a child directory and its
520 # parent in the same commit? (probably can). Could
520 # parent in the same commit? (probably can). Could
521 # cause problems if instead of revnum -1,
521 # cause problems if instead of revnum -1,
522 # we have to look in (copyfrom_path, revnum - 1)
522 # we have to look in (copyfrom_path, revnum - 1)
523 entrypath = get_entry_from_path("/" + child, module=old_module)
523 entrypath = get_entry_from_path("/" + child, module=old_module)
524 if entrypath:
524 if entrypath:
525 entry = self.recode(entrypath.decode(self.encoding))
525 entry = self.recode(entrypath.decode(self.encoding))
526 if entry in copies:
526 if entry in copies:
527 # deleted file within a copy
527 # deleted file within a copy
528 del copies[entry]
528 del copies[entry]
529 else:
529 else:
530 entries.append(entry)
530 entries.append(entry)
531 else:
531 else:
532 self.ui.debug('unknown path in revision %d: %s\n' % \
532 self.ui.debug('unknown path in revision %d: %s\n' % \
533 (revnum, path))
533 (revnum, path))
534 elif kind == svn.core.svn_node_dir:
534 elif kind == svn.core.svn_node_dir:
535 # Should probably synthesize normal file entries
535 # Should probably synthesize normal file entries
536 # and handle as above to clean up copy/rename handling.
536 # and handle as above to clean up copy/rename handling.
537
537
538 # If the directory just had a prop change,
538 # If the directory just had a prop change,
539 # then we shouldn't need to look for its children.
539 # then we shouldn't need to look for its children.
540 if ent.action == 'M':
540 if ent.action == 'M':
541 continue
541 continue
542
542
543 # Also this could create duplicate entries. Not sure
543 # Also this could create duplicate entries. Not sure
544 # whether this will matter. Maybe should make entries a set.
544 # whether this will matter. Maybe should make entries a set.
545 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
545 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
546 # This will fail if a directory was copied
546 # This will fail if a directory was copied
547 # from another branch and then some of its files
547 # from another branch and then some of its files
548 # were deleted in the same transaction.
548 # were deleted in the same transaction.
549 children = self._find_children(path, revnum)
549 children = self._find_children(path, revnum)
550 children.sort()
550 children.sort()
551 for child in children:
551 for child in children:
552 # Can we move a child directory and its
552 # Can we move a child directory and its
553 # parent in the same commit? (probably can). Could
553 # parent in the same commit? (probably can). Could
554 # cause problems if instead of revnum -1,
554 # cause problems if instead of revnum -1,
555 # we have to look in (copyfrom_path, revnum - 1)
555 # we have to look in (copyfrom_path, revnum - 1)
556 entrypath = get_entry_from_path("/" + child, module=self.module)
556 entrypath = get_entry_from_path("/" + child, module=self.module)
557 # print child, self.module, entrypath
557 # print child, self.module, entrypath
558 if entrypath:
558 if entrypath:
559 # Need to filter out directories here...
559 # Need to filter out directories here...
560 kind = svn.ra.check_path(self.ra, entrypath, revnum)
560 kind = svn.ra.check_path(self.ra, entrypath, revnum)
561 if kind != svn.core.svn_node_dir:
561 if kind != svn.core.svn_node_dir:
562 entries.append(self.recode(entrypath))
562 entries.append(self.recode(entrypath))
563
563
564 # Copies here (must copy all from source)
564 # Copies here (must copy all from source)
565 # Probably not a real problem for us if
565 # Probably not a real problem for us if
566 # source does not exist
566 # source does not exist
567
567
568 # Can do this with the copy command "hg copy"
568 # Can do this with the copy command "hg copy"
569 # if ent.copyfrom_path:
569 # if ent.copyfrom_path:
570 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
570 # copyfrom_entry = get_entry_from_path(ent.copyfrom_path.decode(self.encoding),
571 # module=self.module)
571 # module=self.module)
572 # copyto_entry = entrypath
572 # copyto_entry = entrypath
573 #
573 #
574 # print "copy directory", copyfrom_entry, 'to', copyto_entry
574 # print "copy directory", copyfrom_entry, 'to', copyto_entry
575 #
575 #
576 # copies.append((copyfrom_entry, copyto_entry))
576 # copies.append((copyfrom_entry, copyto_entry))
577
577
578 if ent.copyfrom_path:
578 if ent.copyfrom_path:
579 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
579 copyfrom_path = ent.copyfrom_path.decode(self.encoding)
580 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
580 copyfrom_entry = get_entry_from_path(copyfrom_path, module=self.module)
581 if copyfrom_entry:
581 if copyfrom_entry:
582 copyfrom[path] = ent
582 copyfrom[path] = ent
583 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
583 self.ui.debug("mark %s came from %s\n" % (path, copyfrom[path]))
584
584
585 # Good, /probably/ a regular copy. Really should check
585 # Good, /probably/ a regular copy. Really should check
586 # to see whether the parent revision actually contains
586 # to see whether the parent revision actually contains
587 # the directory in question.
587 # the directory in question.
588 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
588 children = self._find_children(self.recode(copyfrom_path), ent.copyfrom_rev)
589 children.sort()
589 children.sort()
590 for child in children:
590 for child in children:
591 entrypath = get_entry_from_path("/" + child, module=self.module)
591 entrypath = get_entry_from_path("/" + child, module=self.module)
592 if entrypath:
592 if entrypath:
593 entry = entrypath.decode(self.encoding)
593 entry = entrypath.decode(self.encoding)
594 # print "COPY COPY From", copyfrom_entry, entry
594 # print "COPY COPY From", copyfrom_entry, entry
595 copyto_path = path + entry[len(copyfrom_entry):]
595 copyto_path = path + entry[len(copyfrom_entry):]
596 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
596 copyto_entry = get_entry_from_path(copyto_path, module=self.module)
597 # print "COPY", entry, "COPY To", copyto_entry
597 # print "COPY", entry, "COPY To", copyto_entry
598 copies[self.recode(copyto_entry)] = self.recode(entry)
598 copies[self.recode(copyto_entry)] = self.recode(entry)
599 # copy from quux splort/quuxfile
599 # copy from quux splort/quuxfile
600
600
601 return (util.unique(entries), copies)
601 return (util.unique(entries), copies)
602
602
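# Editor's summary of the return value (paths invented, shape taken from the
# code above): expandpaths() yields a de-duplicated list of module-relative
# entries touched by the revision plus a dict mapping copy destinations to
# their sources.
_example_entries = ['tests/PloneTestCase.py', 'docs/README.txt']
_example_copies = {'docs/README.txt': 'README.txt'}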
603 def _fetch_revisions(self, from_revnum, to_revnum):
603 def _fetch_revisions(self, from_revnum, to_revnum):
604 if from_revnum < to_revnum:
604 if from_revnum < to_revnum:
605 from_revnum, to_revnum = to_revnum, from_revnum
605 from_revnum, to_revnum = to_revnum, from_revnum
606
606
607 self.child_cset = None
607 self.child_cset = None
608 def parselogentry(orig_paths, revnum, author, date, message):
608 def parselogentry(orig_paths, revnum, author, date, message):
609 """Return the parsed commit object or None, and True if
609 """Return the parsed commit object or None, and True if
610 the revision is a branch root.
610 the revision is a branch root.
611 """
611 """
612 self.ui.debug("parsing revision %d (%d changes)\n" %
612 self.ui.debug("parsing revision %d (%d changes)\n" %
613 (revnum, len(orig_paths)))
613 (revnum, len(orig_paths)))
614
614
615 rev = self.revid(revnum)
615 rev = self.revid(revnum)
616 # branch log might return entries for a parent we already have
616 # branch log might return entries for a parent we already have
617
617
618 if (rev in self.commits or revnum < to_revnum):
618 if (rev in self.commits or revnum < to_revnum):
619 return None, False
619 return None, False
620
620
621 parents = []
621 parents = []
622 # check whether this revision is the start of a branch
622 # check whether this revision is the start of a branch
623 if self.module in orig_paths:
623 if self.module in orig_paths:
624 ent = orig_paths[self.module]
624 ent = orig_paths[self.module]
625 if ent.copyfrom_path:
625 if ent.copyfrom_path:
626 # ent.copyfrom_rev may not be the actual last revision
626 # ent.copyfrom_rev may not be the actual last revision
627 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
627 prev = self.latest(ent.copyfrom_path, ent.copyfrom_rev)
628 parents = [self.revid(prev, ent.copyfrom_path)]
628 parents = [self.revid(prev, ent.copyfrom_path)]
629 self.ui.note('found parent of branch %s at %d: %s\n' % \
629 self.ui.note('found parent of branch %s at %d: %s\n' % \
630 (self.module, prev, ent.copyfrom_path))
630 (self.module, prev, ent.copyfrom_path))
631 else:
631 else:
632 self.ui.debug("No copyfrom path, don't know what to do.\n")
632 self.ui.debug("No copyfrom path, don't know what to do.\n")
633
633
634 orig_paths = orig_paths.items()
634 orig_paths = orig_paths.items()
635 orig_paths.sort()
635 orig_paths.sort()
636 paths = []
636 paths = []
637 # filter out unrelated paths
637 # filter out unrelated paths
638 for path, ent in orig_paths:
638 for path, ent in orig_paths:
639 if not path.startswith(self.module):
639 if not path.startswith(self.module):
640 self.ui.debug("boring@%s: %s\n" % (revnum, path))
640 self.ui.debug("boring@%s: %s\n" % (revnum, path))
641 continue
641 continue
642 paths.append((path, ent))
642 paths.append((path, ent))
643
643
644 # Example SVN datetime. Includes microseconds.
644 # Example SVN datetime. Includes microseconds.
645 # ISO-8601 conformant
645 # ISO-8601 conformant
646 # '2007-01-04T17:35:00.902377Z'
646 # '2007-01-04T17:35:00.902377Z'
647 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
647 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
648
648
649 log = message and self.recode(message)
649 log = message and self.recode(message) or ''
650 author = author and self.recode(author) or ''
650 author = author and self.recode(author) or ''
651 try:
651 try:
652 branch = self.module.split("/")[-1]
652 branch = self.module.split("/")[-1]
653 if branch == 'trunk':
653 if branch == 'trunk':
654 branch = ''
654 branch = ''
655 except IndexError:
655 except IndexError:
656 branch = None
656 branch = None
657
657
658 cset = commit(author=author,
658 cset = commit(author=author,
659 date=util.datestr(date),
659 date=util.datestr(date),
660 desc=log,
660 desc=log,
661 parents=parents,
661 parents=parents,
662 branch=branch,
662 branch=branch,
663 rev=rev.encode('utf-8'))
663 rev=rev.encode('utf-8'))
664
664
665 self.commits[rev] = cset
665 self.commits[rev] = cset
666 # The parents list is *shared* among self.paths and the
666 # The parents list is *shared* among self.paths and the
667 # commit object. Both will be updated below.
667 # commit object. Both will be updated below.
668 self.paths[rev] = (paths, cset.parents)
668 self.paths[rev] = (paths, cset.parents)
669 if self.child_cset and not self.child_cset.parents:
669 if self.child_cset and not self.child_cset.parents:
670 self.child_cset.parents[:] = [rev]
670 self.child_cset.parents[:] = [rev]
671 self.child_cset = cset
671 self.child_cset = cset
672 return cset, len(parents) > 0
672 return cset, len(parents) > 0
673
673
674 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
674 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
675 (self.module, from_revnum, to_revnum))
675 (self.module, from_revnum, to_revnum))
676
676
677 try:
677 try:
678 firstcset = None
678 firstcset = None
679 stream = get_log(self.url, [self.module], from_revnum, to_revnum)
679 stream = get_log(self.url, [self.module], from_revnum, to_revnum)
680 try:
680 try:
681 for entry in stream:
681 for entry in stream:
682 paths, revnum, author, date, message = entry
682 paths, revnum, author, date, message = entry
683 if self.is_blacklisted(revnum):
683 if self.is_blacklisted(revnum):
684 self.ui.note('skipping blacklisted revision %d\n'
684 self.ui.note('skipping blacklisted revision %d\n'
685 % revnum)
685 % revnum)
686 continue
686 continue
687 if paths is None:
687 if paths is None:
688 self.ui.debug('revision %d has no entries\n' % revnum)
688 self.ui.debug('revision %d has no entries\n' % revnum)
689 continue
689 continue
690 cset, branched = parselogentry(paths, revnum, author,
690 cset, branched = parselogentry(paths, revnum, author,
691 date, message)
691 date, message)
692 if cset:
692 if cset:
693 firstcset = cset
693 firstcset = cset
694 if branched:
694 if branched:
695 break
695 break
696 finally:
696 finally:
697 stream.close()
697 stream.close()
698
698
699 if firstcset and not firstcset.parents:
699 if firstcset and not firstcset.parents:
700 # The first revision of the sequence (the last fetched one)
700 # The first revision of the sequence (the last fetched one)
701 # has invalid parents if not a branch root. Find the parent
701 # has invalid parents if not a branch root. Find the parent
702 # revision now, if any.
702 # revision now, if any.
703 try:
703 try:
704 firstrevnum = self.revnum(firstcset.rev)
704 firstrevnum = self.revnum(firstcset.rev)
705 if firstrevnum > 1:
705 if firstrevnum > 1:
706 latest = self.latest(self.module, firstrevnum - 1)
706 latest = self.latest(self.module, firstrevnum - 1)
707 firstcset.parents.append(self.revid(latest))
707 firstcset.parents.append(self.revid(latest))
708 except util.Abort:
708 except util.Abort:
709 pass
709 pass
710 except SubversionException, (inst, num):
710 except SubversionException, (inst, num):
711 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
711 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
712 raise NoSuchRevision(branch=self,
712 raise NoSuchRevision(branch=self,
713 revision="Revision number %d" % to_revnum)
713 revision="Revision number %d" % to_revnum)
714 raise
714 raise
715
715
716 def _getfile(self, file, rev):
716 def _getfile(self, file, rev):
717 io = StringIO()
717 io = StringIO()
718 # TODO: ra.get_file transmits the whole file instead of diffs.
718 # TODO: ra.get_file transmits the whole file instead of diffs.
719 mode = ''
719 mode = ''
720 try:
720 try:
721 new_module, revnum = self.revsplit(rev)[1:]
721 new_module, revnum = self.revsplit(rev)[1:]
722 if self.module != new_module:
722 if self.module != new_module:
723 self.module = new_module
723 self.module = new_module
724 self.reparent(self.module)
724 self.reparent(self.module)
725 info = svn.ra.get_file(self.ra, file, revnum, io)
725 info = svn.ra.get_file(self.ra, file, revnum, io)
726 if isinstance(info, list):
726 if isinstance(info, list):
727 info = info[-1]
727 info = info[-1]
728 mode = ("svn:executable" in info) and 'x' or ''
728 mode = ("svn:executable" in info) and 'x' or ''
729 mode = ("svn:special" in info) and 'l' or mode
729 mode = ("svn:special" in info) and 'l' or mode
730 except SubversionException, e:
730 except SubversionException, e:
731 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
731 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
732 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
732 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
733 if e.apr_err in notfound: # File not found
733 if e.apr_err in notfound: # File not found
734 raise IOError()
734 raise IOError()
735 raise
735 raise
736 data = io.getvalue()
736 data = io.getvalue()
737 if mode == 'l':
737 if mode == 'l':
738 link_prefix = "link "
738 link_prefix = "link "
739 if data.startswith(link_prefix):
739 if data.startswith(link_prefix):
740 data = data[len(link_prefix):]
740 data = data[len(link_prefix):]
741 return data, mode
741 return data, mode
742
742
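# Editor's note on the contract (example values invented): _getfile() returns
# (data, mode), where mode is 'x' for svn:executable files, 'l' for svn:special
# symlinks (the leading "link " marker already stripped from data), and ''
# otherwise; a path missing at that revision raises IOError.
_example_symlink = ('target', 'l')        # a symlink pointing at "target"
_example_script = ('#!/bin/sh\n', 'x')    # an executable file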
743 def _find_children(self, path, revnum):
743 def _find_children(self, path, revnum):
744 path = path.strip('/')
744 path = path.strip('/')
745 pool = Pool()
745 pool = Pool()
746 rpath = '/'.join([self.base, path]).strip('/')
746 rpath = '/'.join([self.base, path]).strip('/')
747 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
747 return ['%s/%s' % (path, x) for x in svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool).keys()]
748
748
749 pre_revprop_change = '''#!/bin/sh
749 pre_revprop_change = '''#!/bin/sh
750
750
751 REPOS="$1"
751 REPOS="$1"
752 REV="$2"
752 REV="$2"
753 USER="$3"
753 USER="$3"
754 PROPNAME="$4"
754 PROPNAME="$4"
755 ACTION="$5"
755 ACTION="$5"
756
756
757 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
757 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
758 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
758 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
759 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
759 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
760
760
761 echo "Changing prohibited revision property" >&2
761 echo "Changing prohibited revision property" >&2
762 exit 1
762 exit 1
763 '''
763 '''
764
764
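# Editor's usage note (URL and revision invented): svn_sink.__init__ below
# installs this hook as hooks/pre-revprop-change so that the plain svn client
# is allowed to set the hg:convert-* revision properties.  The propset calls
# issued by putcommit() correspond to a command line of the form:
#
#   svn propset --revprop -r 42 hg:convert-rev <hg node> file:///tmp/svnrepo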
765 class svn_sink(converter_sink, commandline):
765 class svn_sink(converter_sink, commandline):
766 commit_re = re.compile(r'Committed revision (\d+).', re.M)
766 commit_re = re.compile(r'Committed revision (\d+).', re.M)
767
767
768 def prerun(self):
768 def prerun(self):
769 if self.wc:
769 if self.wc:
770 os.chdir(self.wc)
770 os.chdir(self.wc)
771
771
772 def postrun(self):
772 def postrun(self):
773 if self.wc:
773 if self.wc:
774 os.chdir(self.cwd)
774 os.chdir(self.cwd)
775
775
776 def join(self, name):
776 def join(self, name):
777 return os.path.join(self.wc, '.svn', name)
777 return os.path.join(self.wc, '.svn', name)
778
778
779 def revmapfile(self):
779 def revmapfile(self):
780 return self.join('hg-shamap')
780 return self.join('hg-shamap')
781
781
782 def authorfile(self):
782 def authorfile(self):
783 return self.join('hg-authormap')
783 return self.join('hg-authormap')
784
784
785 def __init__(self, ui, path):
785 def __init__(self, ui, path):
786 converter_sink.__init__(self, ui, path)
786 converter_sink.__init__(self, ui, path)
787 commandline.__init__(self, ui, 'svn')
787 commandline.__init__(self, ui, 'svn')
788 self.delete = []
788 self.delete = []
789 self.setexec = []
789 self.setexec = []
790 self.delexec = []
790 self.delexec = []
791 self.copies = []
791 self.copies = []
792 self.wc = None
792 self.wc = None
793 self.cwd = os.getcwd()
793 self.cwd = os.getcwd()
794
794
795 path = os.path.realpath(path)
795 path = os.path.realpath(path)
796
796
797 created = False
797 created = False
798 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
798 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
799 self.wc = path
799 self.wc = path
800 self.run0('update')
800 self.run0('update')
801 else:
801 else:
802 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
802 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
803
803
804 if os.path.isdir(os.path.dirname(path)):
804 if os.path.isdir(os.path.dirname(path)):
805 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
805 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
806 ui.status(_('initializing svn repo %r\n') %
806 ui.status(_('initializing svn repo %r\n') %
807 os.path.basename(path))
807 os.path.basename(path))
808 commandline(ui, 'svnadmin').run0('create', path)
808 commandline(ui, 'svnadmin').run0('create', path)
809 created = path
809 created = path
810 path = util.normpath(path)
810 path = util.normpath(path)
811 if not path.startswith('/'):
811 if not path.startswith('/'):
812 path = '/' + path
812 path = '/' + path
813 path = 'file://' + path
813 path = 'file://' + path
814
814
815 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
815 ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
816 self.run0('checkout', path, wcpath)
816 self.run0('checkout', path, wcpath)
817
817
818 self.wc = wcpath
818 self.wc = wcpath
819 self.opener = util.opener(self.wc)
819 self.opener = util.opener(self.wc)
820 self.wopener = util.opener(self.wc)
820 self.wopener = util.opener(self.wc)
821 self.childmap = mapfile(ui, self.join('hg-childmap'))
821 self.childmap = mapfile(ui, self.join('hg-childmap'))
822 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
822 self.is_exec = util.checkexec(self.wc) and util.is_exec or None
823
823
824 if created:
824 if created:
825 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
825 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
826 fp = open(hook, 'w')
826 fp = open(hook, 'w')
827 fp.write(pre_revprop_change)
827 fp.write(pre_revprop_change)
828 fp.close()
828 fp.close()
829 util.set_flags(hook, "x")
829 util.set_flags(hook, "x")
830
830
831 xport = transport.SvnRaTransport(url=geturl(path))
831 xport = transport.SvnRaTransport(url=geturl(path))
832 self.uuid = svn.ra.get_uuid(xport.ra)
832 self.uuid = svn.ra.get_uuid(xport.ra)
833
833
834 def wjoin(self, *names):
834 def wjoin(self, *names):
835 return os.path.join(self.wc, *names)
835 return os.path.join(self.wc, *names)
836
836
837 def putfile(self, filename, flags, data):
837 def putfile(self, filename, flags, data):
838 if 'l' in flags:
838 if 'l' in flags:
839 self.wopener.symlink(data, filename)
839 self.wopener.symlink(data, filename)
840 else:
840 else:
841 try:
841 try:
842 if os.path.islink(self.wjoin(filename)):
842 if os.path.islink(self.wjoin(filename)):
843 os.unlink(filename)
843 os.unlink(filename)
844 except OSError:
844 except OSError:
845 pass
845 pass
846 self.wopener(filename, 'w').write(data)
846 self.wopener(filename, 'w').write(data)
847
847
848 if self.is_exec:
848 if self.is_exec:
849 was_exec = self.is_exec(self.wjoin(filename))
849 was_exec = self.is_exec(self.wjoin(filename))
850 else:
850 else:
851 # On filesystems not supporting execute-bit, there is no way
851 # On filesystems not supporting execute-bit, there is no way
852 # to know if it is set except by asking Subversion. Setting it
852 # to know if it is set except by asking Subversion. Setting it
853 # systematically is just as expensive and much simpler.
853 # systematically is just as expensive and much simpler.
854 was_exec = 'x' not in flags
854 was_exec = 'x' not in flags
855
855
856 util.set_flags(self.wjoin(filename), flags)
856 util.set_flags(self.wjoin(filename), flags)
857 if was_exec:
857 if was_exec:
858 if 'x' not in flags:
858 if 'x' not in flags:
859 self.delexec.append(filename)
859 self.delexec.append(filename)
860 else:
860 else:
861 if 'x' in flags:
861 if 'x' in flags:
862 self.setexec.append(filename)
862 self.setexec.append(filename)
863
863
864 def delfile(self, name):
864 def delfile(self, name):
865 self.delete.append(name)
865 self.delete.append(name)
866
866
867 def copyfile(self, source, dest):
867 def copyfile(self, source, dest):
868 self.copies.append([source, dest])
868 self.copies.append([source, dest])
869
869
870 def _copyfile(self, source, dest):
870 def _copyfile(self, source, dest):
871 # SVN's copy command pukes if the destination file exists, but
871 # SVN's copy command pukes if the destination file exists, but
872 # our copyfile method expects to record a copy that has
872 # our copyfile method expects to record a copy that has
873 # already occurred. Cross the semantic gap.
873 # already occurred. Cross the semantic gap.
874 wdest = self.wjoin(dest)
874 wdest = self.wjoin(dest)
875 exists = os.path.exists(wdest)
875 exists = os.path.exists(wdest)
876 if exists:
876 if exists:
877 fd, tempname = tempfile.mkstemp(
877 fd, tempname = tempfile.mkstemp(
878 prefix='hg-copy-', dir=os.path.dirname(wdest))
878 prefix='hg-copy-', dir=os.path.dirname(wdest))
879 os.close(fd)
879 os.close(fd)
880 os.unlink(tempname)
880 os.unlink(tempname)
881 os.rename(wdest, tempname)
881 os.rename(wdest, tempname)
882 try:
882 try:
883 self.run0('copy', source, dest)
883 self.run0('copy', source, dest)
884 finally:
884 finally:
885 if exists:
885 if exists:
886 try:
886 try:
887 os.unlink(wdest)
887 os.unlink(wdest)
888 except OSError:
888 except OSError:
889 pass
889 pass
890 os.rename(tempname, wdest)
890 os.rename(tempname, wdest)
891
891
892 def dirs_of(self, files):
892 def dirs_of(self, files):
893 dirs = set()
893 dirs = set()
894 for f in files:
894 for f in files:
895 if os.path.isdir(self.wjoin(f)):
895 if os.path.isdir(self.wjoin(f)):
896 dirs.add(f)
896 dirs.add(f)
897 for i in strutil.rfindall(f, '/'):
897 for i in strutil.rfindall(f, '/'):
898 dirs.add(f[:i])
898 dirs.add(f[:i])
899 return dirs
899 return dirs
900
900
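# Editor's sketch (paths invented): for plain files dirs_of() collects every
# ancestor directory of each name; add_dirs() and tidy_dirs() then work on
# that set.  A pure-path equivalent, ignoring the isdir() check:
def _dirs_of_sketch(files):
    dirs = set()
    for f in files:
        i = f.rfind('/')
        while i != -1:
            dirs.add(f[:i])
            i = f.rfind('/', 0, i)
    return dirs
# _dirs_of_sketch(['a/b/c.txt', 'a/d.txt']) == set(['a', 'a/b'])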
901 def add_dirs(self, files):
901 def add_dirs(self, files):
902 add_dirs = [d for d in self.dirs_of(files)
902 add_dirs = [d for d in self.dirs_of(files)
903 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
903 if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
904 if add_dirs:
904 if add_dirs:
905 add_dirs.sort()
905 add_dirs.sort()
906 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
906 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
907 return add_dirs
907 return add_dirs
908
908
909 def add_files(self, files):
909 def add_files(self, files):
910 if files:
910 if files:
911 self.xargs(files, 'add', quiet=True)
911 self.xargs(files, 'add', quiet=True)
912 return files
912 return files
913
913
914 def tidy_dirs(self, names):
914 def tidy_dirs(self, names):
915 dirs = list(self.dirs_of(names))
915 dirs = list(self.dirs_of(names))
916 dirs.sort(reverse=True)
916 dirs.sort(reverse=True)
917 deleted = []
917 deleted = []
918 for d in dirs:
918 for d in dirs:
919 wd = self.wjoin(d)
919 wd = self.wjoin(d)
920 if os.listdir(wd) == ['.svn']: # only the .svn admin area left
920 if os.listdir(wd) == ['.svn']: # only the .svn admin area left
921 self.run0('delete', d)
921 self.run0('delete', d)
922 deleted.append(d)
922 deleted.append(d)
923 return deleted
923 return deleted
924
924
925 def addchild(self, parent, child):
925 def addchild(self, parent, child):
926 self.childmap[parent] = child
926 self.childmap[parent] = child
927
927
928 def revid(self, rev):
928 def revid(self, rev):
929 return u"svn:%s@%s" % (self.uuid, rev)
929 return u"svn:%s@%s" % (self.uuid, rev)
930
930
931 def putcommit(self, files, parents, commit):
931 def putcommit(self, files, parents, commit):
932 for parent in parents:
932 for parent in parents:
933 try:
933 try:
934 return self.revid(self.childmap[parent])
934 return self.revid(self.childmap[parent])
935 except KeyError:
935 except KeyError:
936 pass
936 pass
937 entries = set(self.delete)
937 entries = set(self.delete)
938 files = util.frozenset(files)
938 files = util.frozenset(files)
939 entries.update(self.add_dirs(files.difference(entries)))
939 entries.update(self.add_dirs(files.difference(entries)))
940 if self.copies:
940 if self.copies:
941 for s, d in self.copies:
941 for s, d in self.copies:
942 self._copyfile(s, d)
942 self._copyfile(s, d)
943 self.copies = []
943 self.copies = []
944 if self.delete:
944 if self.delete:
945 self.xargs(self.delete, 'delete')
945 self.xargs(self.delete, 'delete')
946 self.delete = []
946 self.delete = []
947 entries.update(self.add_files(files.difference(entries)))
947 entries.update(self.add_files(files.difference(entries)))
948 entries.update(self.tidy_dirs(entries))
948 entries.update(self.tidy_dirs(entries))
949 if self.delexec:
949 if self.delexec:
950 self.xargs(self.delexec, 'propdel', 'svn:executable')
950 self.xargs(self.delexec, 'propdel', 'svn:executable')
951 self.delexec = []
951 self.delexec = []
952 if self.setexec:
952 if self.setexec:
953 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
953 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
954 self.setexec = []
954 self.setexec = []
955
955
956 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
956 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
957 fp = os.fdopen(fd, 'w')
957 fp = os.fdopen(fd, 'w')
958 fp.write(commit.desc)
958 fp.write(commit.desc)
959 fp.close()
959 fp.close()
960 try:
960 try:
961 output = self.run0('commit',
961 output = self.run0('commit',
962 username=util.shortuser(commit.author),
962 username=util.shortuser(commit.author),
963 file=messagefile,
963 file=messagefile,
964 encoding='utf-8')
964 encoding='utf-8')
965 try:
965 try:
966 rev = self.commit_re.search(output).group(1)
966 rev = self.commit_re.search(output).group(1)
967 except AttributeError:
967 except AttributeError:
968 self.ui.warn(_('unexpected svn output:\n'))
968 self.ui.warn(_('unexpected svn output:\n'))
969 self.ui.warn(output)
969 self.ui.warn(output)
970 raise util.Abort(_('unable to cope with svn output'))
970 raise util.Abort(_('unable to cope with svn output'))
971 if commit.rev:
971 if commit.rev:
972 self.run('propset', 'hg:convert-rev', commit.rev,
972 self.run('propset', 'hg:convert-rev', commit.rev,
973 revprop=True, revision=rev)
973 revprop=True, revision=rev)
974 if commit.branch and commit.branch != 'default':
974 if commit.branch and commit.branch != 'default':
975 self.run('propset', 'hg:convert-branch', commit.branch,
975 self.run('propset', 'hg:convert-branch', commit.branch,
976 revprop=True, revision=rev)
976 revprop=True, revision=rev)
977 for parent in parents:
977 for parent in parents:
978 self.addchild(parent, rev)
978 self.addchild(parent, rev)
979 return self.revid(rev)
979 return self.revid(rev)
980 finally:
980 finally:
981 os.unlink(messagefile)
981 os.unlink(messagefile)
982
982
983 def puttags(self, tags):
983 def puttags(self, tags):
984 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
984 self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
@@ -1,1733 +1,1733 b''
1 """
1 """
2 util.py - Mercurial utility functions and platform specific implementations
2 util.py - Mercurial utility functions and platform specific implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
16 import cStringIO, errno, getpass, popen2, re, shutil, sys, tempfile, strutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 import re, urlparse
18 import urlparse
19
19
20 try:
20 try:
21 set = set
21 set = set
22 frozenset = frozenset
22 frozenset = frozenset
23 except NameError:
23 except NameError:
24 from sets import Set as set, ImmutableSet as frozenset
24 from sets import Set as set, ImmutableSet as frozenset
25
25
26 try:
26 try:
27 _encoding = os.environ.get("HGENCODING")
27 _encoding = os.environ.get("HGENCODING")
28 if sys.platform == 'darwin' and not _encoding:
28 if sys.platform == 'darwin' and not _encoding:
29 # On darwin, getpreferredencoding ignores the locale environment and
29 # On darwin, getpreferredencoding ignores the locale environment and
30 # always returns mac-roman. We override this if the environment is
30 # always returns mac-roman. We override this if the environment is
31 # not C (has been customized by the user).
31 # not C (has been customized by the user).
32 locale.setlocale(locale.LC_CTYPE, '')
32 locale.setlocale(locale.LC_CTYPE, '')
33 _encoding = locale.getlocale()[1]
33 _encoding = locale.getlocale()[1]
34 if not _encoding:
34 if not _encoding:
35 _encoding = locale.getpreferredencoding() or 'ascii'
35 _encoding = locale.getpreferredencoding() or 'ascii'
36 except locale.Error:
36 except locale.Error:
37 _encoding = 'ascii'
37 _encoding = 'ascii'
38 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
38 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
39 _fallbackencoding = 'ISO-8859-1'
39 _fallbackencoding = 'ISO-8859-1'
40
40
41 def tolocal(s):
41 def tolocal(s):
42 """
42 """
43 Convert a string from internal UTF-8 to local encoding
43 Convert a string from internal UTF-8 to local encoding
44
44
45 All internal strings should be UTF-8 but some repos before the
45 All internal strings should be UTF-8 but some repos before the
46 implementation of locale support may contain latin1 or possibly
46 implementation of locale support may contain latin1 or possibly
47 other character sets. We attempt to decode everything strictly
47 other character sets. We attempt to decode everything strictly
48 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
48 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
49 replace unknown characters.
49 replace unknown characters.
50 """
50 """
51 for e in ('UTF-8', _fallbackencoding):
51 for e in ('UTF-8', _fallbackencoding):
52 try:
52 try:
53 u = s.decode(e) # attempt strict decoding
53 u = s.decode(e) # attempt strict decoding
54 return u.encode(_encoding, "replace")
54 return u.encode(_encoding, "replace")
55 except LookupError, k:
55 except LookupError, k:
56 raise Abort(_("%s, please check your locale settings") % k)
56 raise Abort(_("%s, please check your locale settings") % k)
57 except UnicodeDecodeError:
57 except UnicodeDecodeError:
58 pass
58 pass
59 u = s.decode("utf-8", "replace") # last ditch
59 u = s.decode("utf-8", "replace") # last ditch
60 return u.encode(_encoding, "replace")
60 return u.encode(_encoding, "replace")
61
61
62 def fromlocal(s):
62 def fromlocal(s):
63 """
63 """
64 Convert a string from the local character encoding to UTF-8
64 Convert a string from the local character encoding to UTF-8
65
65
66 We attempt to decode strings using the encoding mode set by
66 We attempt to decode strings using the encoding mode set by
67 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
67 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
68 characters will cause an error message. Other modes include
68 characters will cause an error message. Other modes include
69 'replace', which replaces unknown characters with a special
69 'replace', which replaces unknown characters with a special
70 Unicode character, and 'ignore', which drops the character.
70 Unicode character, and 'ignore', which drops the character.
71 """
71 """
72 try:
72 try:
73 return s.decode(_encoding, _encodingmode).encode("utf-8")
73 return s.decode(_encoding, _encodingmode).encode("utf-8")
74 except UnicodeDecodeError, inst:
74 except UnicodeDecodeError, inst:
75 sub = s[max(0, inst.start-10):inst.start+10]
75 sub = s[max(0, inst.start-10):inst.start+10]
76 raise Abort("decoding near '%s': %s!" % (sub, inst))
76 raise Abort("decoding near '%s': %s!" % (sub, inst))
77 except LookupError, k:
77 except LookupError, k:
78 raise Abort(_("%s, please check your locale settings") % k)
78 raise Abort(_("%s, please check your locale settings") % k)
79
79
80 def locallen(s):
80 def locallen(s):
81 """Find the length in characters of a local string"""
81 """Find the length in characters of a local string"""
82 return len(s.decode(_encoding, "replace"))
82 return len(s.decode(_encoding, "replace"))
83
83
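# Editor's round-trip example (assumes _encoding is latin-1, e.g. via
# HGENCODING=latin-1): fromlocal() produces the UTF-8 used internally and
# tolocal() converts back, degrading as described in its docstring for
# repositories that predate the locale support.
#
#   local = '\xe9'                  # 'e acute' in latin-1
#   fromlocal(local) == '\xc3\xa9'  # the same character in UTF-8
#   tolocal('\xc3\xa9') == local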
84 # used by parsedate
84 # used by parsedate
85 defaultdateformats = (
85 defaultdateformats = (
86 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %H:%M:%S',
87 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %I:%M:%S%p',
88 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %H:%M',
89 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d %I:%M%p',
90 '%Y-%m-%d',
90 '%Y-%m-%d',
91 '%m-%d',
91 '%m-%d',
92 '%m/%d',
92 '%m/%d',
93 '%m/%d/%y',
93 '%m/%d/%y',
94 '%m/%d/%Y',
94 '%m/%d/%Y',
95 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %H:%M:%S %Y',
96 '%a %b %d %I:%M:%S%p %Y',
96 '%a %b %d %I:%M:%S%p %Y',
97 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
98 '%b %d %H:%M:%S %Y',
98 '%b %d %H:%M:%S %Y',
99 '%b %d %I:%M:%S%p %Y',
99 '%b %d %I:%M:%S%p %Y',
100 '%b %d %H:%M:%S',
100 '%b %d %H:%M:%S',
101 '%b %d %I:%M:%S%p',
101 '%b %d %I:%M:%S%p',
102 '%b %d %H:%M',
102 '%b %d %H:%M',
103 '%b %d %I:%M%p',
103 '%b %d %I:%M%p',
104 '%b %d %Y',
104 '%b %d %Y',
105 '%b %d',
105 '%b %d',
106 '%H:%M:%S',
106 '%H:%M:%S',
107 '%I:%M:%S%p',
107 '%I:%M:%S%p',
108 '%H:%M',
108 '%H:%M',
109 '%I:%M%p',
109 '%I:%M%p',
110 )
110 )
111
111
112 extendeddateformats = defaultdateformats + (
112 extendeddateformats = defaultdateformats + (
113 "%Y",
113 "%Y",
114 "%Y-%m",
114 "%Y-%m",
115 "%b",
115 "%b",
116 "%b %Y",
116 "%b %Y",
117 )
117 )
118
118
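# Editor's example (date string invented): these tables are handed to
# util.parsedate(), which tries them in turn; each entry is a plain
# time.strptime format string (the time module is imported at the top of
# this file):
_example_tm = time.strptime('2007-01-04 17:35:00', '%Y-%m-%d %H:%M:%S')
# _example_tm.tm_year == 2007 and _example_tm.tm_hour == 17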
119 class SignalInterrupt(Exception):
119 class SignalInterrupt(Exception):
120 """Exception raised on SIGTERM and SIGHUP."""
120 """Exception raised on SIGTERM and SIGHUP."""
121
121
122 # differences from SafeConfigParser:
122 # differences from SafeConfigParser:
123 # - case-sensitive keys
123 # - case-sensitive keys
124 # - allows values that are not strings (this means that you may not
124 # - allows values that are not strings (this means that you may not
125 # be able to save the configuration to a file)
125 # be able to save the configuration to a file)
126 class configparser(ConfigParser.SafeConfigParser):
126 class configparser(ConfigParser.SafeConfigParser):
127 def optionxform(self, optionstr):
127 def optionxform(self, optionstr):
128 return optionstr
128 return optionstr
129
129
130 def set(self, section, option, value):
130 def set(self, section, option, value):
131 return ConfigParser.ConfigParser.set(self, section, option, value)
131 return ConfigParser.ConfigParser.set(self, section, option, value)
132
132
133 def _interpolate(self, section, option, rawval, vars):
133 def _interpolate(self, section, option, rawval, vars):
134 if not isinstance(rawval, basestring):
134 if not isinstance(rawval, basestring):
135 return rawval
135 return rawval
136 return ConfigParser.SafeConfigParser._interpolate(self, section,
136 return ConfigParser.SafeConfigParser._interpolate(self, section,
137 option, rawval, vars)
137 option, rawval, vars)
138
138
139 def cachefunc(func):
139 def cachefunc(func):
140 '''cache the result of function calls'''
140 '''cache the result of function calls'''
141 # XXX doesn't handle keywords args
141 # XXX doesn't handle keywords args
142 cache = {}
142 cache = {}
143 if func.func_code.co_argcount == 1:
143 if func.func_code.co_argcount == 1:
144 # we gain a small amount of time because
144 # we gain a small amount of time because
145 # we don't need to pack/unpack the list
145 # we don't need to pack/unpack the list
146 def f(arg):
146 def f(arg):
147 if arg not in cache:
147 if arg not in cache:
148 cache[arg] = func(arg)
148 cache[arg] = func(arg)
149 return cache[arg]
149 return cache[arg]
150 else:
150 else:
151 def f(*args):
151 def f(*args):
152 if args not in cache:
152 if args not in cache:
153 cache[args] = func(*args)
153 cache[args] = func(*args)
154 return cache[args]
154 return cache[args]
155
155
156 return f
156 return f
157
157
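# Editor's usage sketch: cachefunc() memoizes on positional arguments only
# (keyword arguments are not handled, as the XXX note above says).
def _slow_square(n):
    return n * n
_cached_square = cachefunc(_slow_square)
# _cached_square(3) computes 9 once; later calls with 3 hit the cache.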
158 def pipefilter(s, cmd):
158 def pipefilter(s, cmd):
159 '''filter string S through command CMD, returning its output'''
159 '''filter string S through command CMD, returning its output'''
160 (pin, pout) = os.popen2(cmd, 'b')
160 (pin, pout) = os.popen2(cmd, 'b')
161 def writer():
161 def writer():
162 try:
162 try:
163 pin.write(s)
163 pin.write(s)
164 pin.close()
164 pin.close()
165 except IOError, inst:
165 except IOError, inst:
166 if inst.errno != errno.EPIPE:
166 if inst.errno != errno.EPIPE:
167 raise
167 raise
168
168
169 # we should use select instead on UNIX, but this will work on most
169 # we should use select instead on UNIX, but this will work on most
170 # systems, including Windows
170 # systems, including Windows
171 w = threading.Thread(target=writer)
171 w = threading.Thread(target=writer)
172 w.start()
172 w.start()
173 f = pout.read()
173 f = pout.read()
174 pout.close()
174 pout.close()
175 w.join()
175 w.join()
176 return f
176 return f
177
177
178 def tempfilter(s, cmd):
178 def tempfilter(s, cmd):
179 '''filter string S through a pair of temporary files with CMD.
179 '''filter string S through a pair of temporary files with CMD.
180 CMD is used as a template to create the real command to be run,
180 CMD is used as a template to create the real command to be run,
181 with the strings INFILE and OUTFILE replaced by the real names of
181 with the strings INFILE and OUTFILE replaced by the real names of
182 the temporary files generated.'''
182 the temporary files generated.'''
183 inname, outname = None, None
183 inname, outname = None, None
184 try:
184 try:
185 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
185 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
186 fp = os.fdopen(infd, 'wb')
186 fp = os.fdopen(infd, 'wb')
187 fp.write(s)
187 fp.write(s)
188 fp.close()
188 fp.close()
189 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
189 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
190 os.close(outfd)
190 os.close(outfd)
191 cmd = cmd.replace('INFILE', inname)
191 cmd = cmd.replace('INFILE', inname)
192 cmd = cmd.replace('OUTFILE', outname)
192 cmd = cmd.replace('OUTFILE', outname)
193 code = os.system(cmd)
193 code = os.system(cmd)
194 if sys.platform == 'OpenVMS' and code & 1:
194 if sys.platform == 'OpenVMS' and code & 1:
195 code = 0
195 code = 0
196 if code: raise Abort(_("command '%s' failed: %s") %
196 if code: raise Abort(_("command '%s' failed: %s") %
197 (cmd, explain_exit(code)))
197 (cmd, explain_exit(code)))
198 return open(outname, 'rb').read()
198 return open(outname, 'rb').read()
199 finally:
199 finally:
200 try:
200 try:
201 if inname: os.unlink(inname)
201 if inname: os.unlink(inname)
202 except: pass
202 except: pass
203 try:
203 try:
204 if outname: os.unlink(outname)
204 if outname: os.unlink(outname)
205 except: pass
205 except: pass
206
206
207 filtertable = {
207 filtertable = {
208 'tempfile:': tempfilter,
208 'tempfile:': tempfilter,
209 'pipe:': pipefilter,
209 'pipe:': pipefilter,
210 }
210 }
211
211
212 def filter(s, cmd):
212 def filter(s, cmd):
213 "filter a string through a command that transforms its input to its output"
213 "filter a string through a command that transforms its input to its output"
214 for name, fn in filtertable.iteritems():
214 for name, fn in filtertable.iteritems():
215 if cmd.startswith(name):
215 if cmd.startswith(name):
216 return fn(s, cmd[len(name):].lstrip())
216 return fn(s, cmd[len(name):].lstrip())
217 return pipefilter(s, cmd)
217 return pipefilter(s, cmd)
218
218
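# Editor's usage sketch (commands invented; assumes a Unix 'tr' on PATH):
# filter() dispatches on the "tempfile:" and "pipe:" prefixes and falls back
# to pipefilter() for anything else.
#
#   filter('some text\n', 'pipe: tr a-z A-Z')                         # 'SOME TEXT\n'
#   filter('some text\n', 'tempfile: tr a-z A-Z < INFILE > OUTFILE')  # same result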
219 def binary(s):
219 def binary(s):
220 """return true if a string is binary data using diff's heuristic"""
220 """return true if a string is binary data using diff's heuristic"""
221 if s and '\0' in s[:4096]:
221 if s and '\0' in s[:4096]:
222 return True
222 return True
223 return False
223 return False
224
224
225 def unique(g):
225 def unique(g):
226 """return the uniq elements of iterable g"""
226 """return the uniq elements of iterable g"""
227 return dict.fromkeys(g).keys()
227 return dict.fromkeys(g).keys()
228
228
229 class Abort(Exception):
229 class Abort(Exception):
230 """Raised if a command needs to print an error and exit."""
230 """Raised if a command needs to print an error and exit."""
231
231
232 class UnexpectedOutput(Abort):
232 class UnexpectedOutput(Abort):
233 """Raised to print an error with part of output and exit."""
233 """Raised to print an error with part of output and exit."""
234
234
235 def always(fn): return True
235 def always(fn): return True
236 def never(fn): return False
236 def never(fn): return False
237
237
238 def expand_glob(pats):
238 def expand_glob(pats):
239 '''On Windows, expand the implicit globs in a list of patterns'''
239 '''On Windows, expand the implicit globs in a list of patterns'''
240 if os.name != 'nt':
240 if os.name != 'nt':
241 return list(pats)
241 return list(pats)
242 ret = []
242 ret = []
243 for p in pats:
243 for p in pats:
244 kind, name = patkind(p, None)
244 kind, name = patkind(p, None)
245 if kind is None:
245 if kind is None:
246 globbed = glob.glob(name)
246 globbed = glob.glob(name)
247 if globbed:
247 if globbed:
248 ret.extend(globbed)
248 ret.extend(globbed)
249 continue
249 continue
250 # if we couldn't expand the glob, just keep it around
250 # if we couldn't expand the glob, just keep it around
251 ret.append(p)
251 ret.append(p)
252 return ret
252 return ret
253
253
254 def patkind(name, dflt_pat='glob'):
254 def patkind(name, dflt_pat='glob'):
255 """Split a string into an optional pattern kind prefix and the
255 """Split a string into an optional pattern kind prefix and the
256 actual pattern."""
256 actual pattern."""
257 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
257 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
258 if name.startswith(prefix + ':'): return name.split(':', 1)
258 if name.startswith(prefix + ':'): return name.split(':', 1)
259 return dflt_pat, name
259 return dflt_pat, name
260
260
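# Editor's examples: patkind() peels off an explicit "kind:" prefix (returning
# the two pieces as a list) and otherwise falls back to the default kind
# (returning a tuple).
# patkind(r're:.*\.py$') == ['re', r'.*\.py$']
# patkind('path:tests')  == ['path', 'tests']
# patkind('foo/bar')     == ('glob', 'foo/bar')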
261 def globre(pat, head='^', tail='$'):
261 def globre(pat, head='^', tail='$'):
262 "convert a glob pattern into a regexp"
262 "convert a glob pattern into a regexp"
263 i, n = 0, len(pat)
263 i, n = 0, len(pat)
264 res = ''
264 res = ''
265 group = False
265 group = False
266 def peek(): return i < n and pat[i]
266 def peek(): return i < n and pat[i]
267 while i < n:
267 while i < n:
268 c = pat[i]
268 c = pat[i]
269 i = i+1
269 i = i+1
270 if c == '*':
270 if c == '*':
271 if peek() == '*':
271 if peek() == '*':
272 i += 1
272 i += 1
273 res += '.*'
273 res += '.*'
274 else:
274 else:
275 res += '[^/]*'
275 res += '[^/]*'
276 elif c == '?':
276 elif c == '?':
277 res += '.'
277 res += '.'
278 elif c == '[':
278 elif c == '[':
279 j = i
279 j = i
280 if j < n and pat[j] in '!]':
280 if j < n and pat[j] in '!]':
281 j += 1
281 j += 1
282 while j < n and pat[j] != ']':
282 while j < n and pat[j] != ']':
283 j += 1
283 j += 1
284 if j >= n:
284 if j >= n:
285 res += '\\['
285 res += '\\['
286 else:
286 else:
287 stuff = pat[i:j].replace('\\','\\\\')
287 stuff = pat[i:j].replace('\\','\\\\')
288 i = j + 1
288 i = j + 1
289 if stuff[0] == '!':
289 if stuff[0] == '!':
290 stuff = '^' + stuff[1:]
290 stuff = '^' + stuff[1:]
291 elif stuff[0] == '^':
291 elif stuff[0] == '^':
292 stuff = '\\' + stuff
292 stuff = '\\' + stuff
293 res = '%s[%s]' % (res, stuff)
293 res = '%s[%s]' % (res, stuff)
294 elif c == '{':
294 elif c == '{':
295 group = True
295 group = True
296 res += '(?:'
296 res += '(?:'
297 elif c == '}' and group:
297 elif c == '}' and group:
298 res += ')'
298 res += ')'
299 group = False
299 group = False
300 elif c == ',' and group:
300 elif c == ',' and group:
301 res += '|'
301 res += '|'
302 elif c == '\\':
302 elif c == '\\':
303 p = peek()
303 p = peek()
304 if p:
304 if p:
305 i += 1
305 i += 1
306 res += re.escape(p)
306 res += re.escape(p)
307 else:
307 else:
308 res += re.escape(c)
308 res += re.escape(c)
309 else:
309 else:
310 res += re.escape(c)
310 res += re.escape(c)
311 return head + res + tail
311 return head + res + tail
312
312
313 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
313 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
314
314
315 def pathto(root, n1, n2):
315 def pathto(root, n1, n2):
316 '''return the relative path from one place to another.
316 '''return the relative path from one place to another.
317 root should use os.sep to separate directories
317 root should use os.sep to separate directories
318 n1 should use os.sep to separate directories
318 n1 should use os.sep to separate directories
319 n2 should use "/" to separate directories
319 n2 should use "/" to separate directories
320 returns an os.sep-separated path.
320 returns an os.sep-separated path.
321
321
322 If n1 is a relative path, it's assumed it's
322 If n1 is a relative path, it's assumed it's
323 relative to root.
323 relative to root.
324 n2 should always be relative to root.
324 n2 should always be relative to root.
325 '''
325 '''
326 if not n1: return localpath(n2)
326 if not n1: return localpath(n2)
327 if os.path.isabs(n1):
327 if os.path.isabs(n1):
328 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
328 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
329 return os.path.join(root, localpath(n2))
329 return os.path.join(root, localpath(n2))
330 n2 = '/'.join((pconvert(root), n2))
330 n2 = '/'.join((pconvert(root), n2))
331 a, b = splitpath(n1), n2.split('/')
331 a, b = splitpath(n1), n2.split('/')
332 a.reverse()
332 a.reverse()
333 b.reverse()
333 b.reverse()
334 while a and b and a[-1] == b[-1]:
334 while a and b and a[-1] == b[-1]:
335 a.pop()
335 a.pop()
336 b.pop()
336 b.pop()
337 b.reverse()
337 b.reverse()
338 return os.sep.join((['..'] * len(a)) + b)
338 return os.sep.join((['..'] * len(a)) + b)
339
339
340 def canonpath(root, cwd, myname):
340 def canonpath(root, cwd, myname):
341 """return the canonical path of myname, given cwd and root"""
341 """return the canonical path of myname, given cwd and root"""
342 if root == os.sep:
342 if root == os.sep:
343 rootsep = os.sep
343 rootsep = os.sep
344 elif endswithsep(root):
344 elif endswithsep(root):
345 rootsep = root
345 rootsep = root
346 else:
346 else:
347 rootsep = root + os.sep
347 rootsep = root + os.sep
348 name = myname
348 name = myname
349 if not os.path.isabs(name):
349 if not os.path.isabs(name):
350 name = os.path.join(root, cwd, name)
350 name = os.path.join(root, cwd, name)
351 name = os.path.normpath(name)
351 name = os.path.normpath(name)
352 audit_path = path_auditor(root)
352 audit_path = path_auditor(root)
353 if name != rootsep and name.startswith(rootsep):
353 if name != rootsep and name.startswith(rootsep):
354 name = name[len(rootsep):]
354 name = name[len(rootsep):]
355 audit_path(name)
355 audit_path(name)
356 return pconvert(name)
356 return pconvert(name)
357 elif name == root:
357 elif name == root:
358 return ''
358 return ''
359 else:
359 else:
360 # Determine whether `name' is in the hierarchy at or beneath `root',
360 # Determine whether `name' is in the hierarchy at or beneath `root',
361 # by iterating name=dirname(name) until that causes no change (can't
361 # by iterating name=dirname(name) until that causes no change (can't
362 # check name == '/', because that doesn't work on windows). For each
362 # check name == '/', because that doesn't work on windows). For each
363 # `name', compare dev/inode numbers. If they match, the list `rel'
363 # `name', compare dev/inode numbers. If they match, the list `rel'
364 # holds the reversed list of components making up the relative file
364 # holds the reversed list of components making up the relative file
365 # name we want.
365 # name we want.
366 root_st = os.stat(root)
366 root_st = os.stat(root)
367 rel = []
367 rel = []
368 while True:
368 while True:
369 try:
369 try:
370 name_st = os.stat(name)
370 name_st = os.stat(name)
371 except OSError:
371 except OSError:
372 break
372 break
373 if samestat(name_st, root_st):
373 if samestat(name_st, root_st):
374 if not rel:
374 if not rel:
375 # name was actually the same as root (maybe a symlink)
375 # name was actually the same as root (maybe a symlink)
376 return ''
376 return ''
377 rel.reverse()
377 rel.reverse()
378 name = os.path.join(*rel)
378 name = os.path.join(*rel)
379 audit_path(name)
379 audit_path(name)
380 return pconvert(name)
380 return pconvert(name)
381 dirname, basename = os.path.split(name)
381 dirname, basename = os.path.split(name)
382 rel.append(basename)
382 rel.append(basename)
383 if dirname == name:
383 if dirname == name:
384 break
384 break
385 name = dirname
385 name = dirname
386
386
387 raise Abort('%s not under root' % myname)
387 raise Abort('%s not under root' % myname)
388
388
389 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
389 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None):
390 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
390 return _matcher(canonroot, cwd, names, inc, exc, 'glob', src)
391
391
392 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
392 def cmdmatcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None,
393 globbed=False, default=None):
393 globbed=False, default=None):
394 default = default or 'relpath'
394 default = default or 'relpath'
395 if default == 'relpath' and not globbed:
395 if default == 'relpath' and not globbed:
396 names = expand_glob(names)
396 names = expand_glob(names)
397 return _matcher(canonroot, cwd, names, inc, exc, default, src)
397 return _matcher(canonroot, cwd, names, inc, exc, default, src)
398
398
399 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
399 def _matcher(canonroot, cwd, names, inc, exc, dflt_pat, src):
400 """build a function to match a set of file patterns
400 """build a function to match a set of file patterns
401
401
402 arguments:
402 arguments:
403 canonroot - the canonical root of the tree you're matching against
403 canonroot - the canonical root of the tree you're matching against
404 cwd - the current working directory, if relevant
404 cwd - the current working directory, if relevant
405 names - patterns to find
405 names - patterns to find
406 inc - patterns to include
406 inc - patterns to include
407 exc - patterns to exclude
407 exc - patterns to exclude
408 dflt_pat - if a pattern in names has no explicit type, assume this one
408 dflt_pat - if a pattern in names has no explicit type, assume this one
409 src - where these patterns came from (e.g. .hgignore)
409 src - where these patterns came from (e.g. .hgignore)
410
410
411 a pattern is one of:
411 a pattern is one of:
412 'glob:<glob>' - a glob relative to cwd
412 'glob:<glob>' - a glob relative to cwd
413 're:<regexp>' - a regular expression
413 're:<regexp>' - a regular expression
414 'path:<path>' - a path relative to canonroot
414 'path:<path>' - a path relative to canonroot
415 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
415 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
416 'relpath:<path>' - a path relative to cwd
416 'relpath:<path>' - a path relative to cwd
417 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
417 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
418 '<something>' - one of the cases above, selected by the dflt_pat argument
418 '<something>' - one of the cases above, selected by the dflt_pat argument
419
419
420 returns:
420 returns:
421 a 3-tuple containing
421 a 3-tuple containing
422 - list of roots (places where one should start a recursive walk of the fs);
422 - list of roots (places where one should start a recursive walk of the fs);
423 this often matches the explicit non-pattern names passed in, but also
423 this often matches the explicit non-pattern names passed in, but also
424 includes the initial part of glob: patterns that has no glob characters
424 includes the initial part of glob: patterns that has no glob characters
425 - a bool match(filename) function
425 - a bool match(filename) function
426 - a bool indicating if any patterns were passed in
426 - a bool indicating if any patterns were passed in
427 """
427 """
428
428
429 # a common case: no patterns at all
429 # a common case: no patterns at all
430 if not names and not inc and not exc:
430 if not names and not inc and not exc:
431 return [], always, False
431 return [], always, False
432
432
433 def contains_glob(name):
433 def contains_glob(name):
434 for c in name:
434 for c in name:
435 if c in _globchars: return True
435 if c in _globchars: return True
436 return False
436 return False
437
437
438 def regex(kind, name, tail):
438 def regex(kind, name, tail):
439 '''convert a pattern into a regular expression'''
439 '''convert a pattern into a regular expression'''
440 if not name:
440 if not name:
441 return ''
441 return ''
442 if kind == 're':
442 if kind == 're':
443 return name
443 return name
444 elif kind == 'path':
444 elif kind == 'path':
445 return '^' + re.escape(name) + '(?:/|$)'
445 return '^' + re.escape(name) + '(?:/|$)'
446 elif kind == 'relglob':
446 elif kind == 'relglob':
447 return globre(name, '(?:|.*/)', tail)
447 return globre(name, '(?:|.*/)', tail)
448 elif kind == 'relpath':
448 elif kind == 'relpath':
449 return re.escape(name) + '(?:/|$)'
449 return re.escape(name) + '(?:/|$)'
450 elif kind == 'relre':
450 elif kind == 'relre':
451 if name.startswith('^'):
451 if name.startswith('^'):
452 return name
452 return name
453 return '.*' + name
453 return '.*' + name
454 return globre(name, '', tail)
454 return globre(name, '', tail)
455
455
456 def matchfn(pats, tail):
456 def matchfn(pats, tail):
457 """build a matching function from a set of patterns"""
457 """build a matching function from a set of patterns"""
458 if not pats:
458 if not pats:
459 return
459 return
460 try:
460 try:
461 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
461 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
462 return re.compile(pat).match
462 return re.compile(pat).match
463 except OverflowError:
463 except OverflowError:
464 # We're using a Python with a tiny regex engine and we
464 # We're using a Python with a tiny regex engine and we
465 # made it explode, so we'll divide the pattern list in two
465 # made it explode, so we'll divide the pattern list in two
466 # until it works
466 # until it works
467 l = len(pats)
467 l = len(pats)
468 if l < 2:
468 if l < 2:
469 raise
469 raise
470 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
470 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
471 return lambda s: a(s) or b(s)
471 return lambda s: a(s) or b(s)
472 except re.error:
472 except re.error:
473 for k, p in pats:
473 for k, p in pats:
474 try:
474 try:
475 re.compile('(?:%s)' % regex(k, p, tail))
475 re.compile('(?:%s)' % regex(k, p, tail))
476 except re.error:
476 except re.error:
477 if src:
477 if src:
478 raise Abort("%s: invalid pattern (%s): %s" %
478 raise Abort("%s: invalid pattern (%s): %s" %
479 (src, k, p))
479 (src, k, p))
480 else:
480 else:
481 raise Abort("invalid pattern (%s): %s" % (k, p))
481 raise Abort("invalid pattern (%s): %s" % (k, p))
482 raise Abort("invalid pattern")
482 raise Abort("invalid pattern")
483
483
484 def globprefix(pat):
484 def globprefix(pat):
485 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
485 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
486 root = []
486 root = []
487 for p in pat.split('/'):
487 for p in pat.split('/'):
488 if contains_glob(p): break
488 if contains_glob(p): break
489 root.append(p)
489 root.append(p)
490 return '/'.join(root) or '.'
490 return '/'.join(root) or '.'
491
491
492 def normalizepats(names, default):
492 def normalizepats(names, default):
493 pats = []
493 pats = []
494 roots = []
494 roots = []
495 anypats = False
495 anypats = False
496 for kind, name in [patkind(p, default) for p in names]:
496 for kind, name in [patkind(p, default) for p in names]:
497 if kind in ('glob', 'relpath'):
497 if kind in ('glob', 'relpath'):
498 name = canonpath(canonroot, cwd, name)
498 name = canonpath(canonroot, cwd, name)
499 elif kind in ('relglob', 'path'):
499 elif kind in ('relglob', 'path'):
500 name = normpath(name)
500 name = normpath(name)
501
501
502 pats.append((kind, name))
502 pats.append((kind, name))
503
503
504 if kind in ('glob', 're', 'relglob', 'relre'):
504 if kind in ('glob', 're', 'relglob', 'relre'):
505 anypats = True
505 anypats = True
506
506
507 if kind == 'glob':
507 if kind == 'glob':
508 root = globprefix(name)
508 root = globprefix(name)
509 roots.append(root)
509 roots.append(root)
510 elif kind in ('relpath', 'path'):
510 elif kind in ('relpath', 'path'):
511 roots.append(name or '.')
511 roots.append(name or '.')
512 elif kind == 'relglob':
512 elif kind == 'relglob':
513 roots.append('.')
513 roots.append('.')
514 return roots, pats, anypats
514 return roots, pats, anypats
515
515
516 roots, pats, anypats = normalizepats(names, dflt_pat)
516 roots, pats, anypats = normalizepats(names, dflt_pat)
517
517
518 patmatch = matchfn(pats, '$') or always
518 patmatch = matchfn(pats, '$') or always
519 incmatch = always
519 incmatch = always
520 if inc:
520 if inc:
521 dummy, inckinds, dummy = normalizepats(inc, 'glob')
521 dummy, inckinds, dummy = normalizepats(inc, 'glob')
522 incmatch = matchfn(inckinds, '(?:/|$)')
522 incmatch = matchfn(inckinds, '(?:/|$)')
523 excmatch = lambda fn: False
523 excmatch = lambda fn: False
524 if exc:
524 if exc:
525 dummy, exckinds, dummy = normalizepats(exc, 'glob')
525 dummy, exckinds, dummy = normalizepats(exc, 'glob')
526 excmatch = matchfn(exckinds, '(?:/|$)')
526 excmatch = matchfn(exckinds, '(?:/|$)')
527
527
528 if not names and inc and not exc:
528 if not names and inc and not exc:
529 # common case: hgignore patterns
529 # common case: hgignore patterns
530 match = incmatch
530 match = incmatch
531 else:
531 else:
532 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
532 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
533
533
534 return (roots, match, (inc or exc or anypats) and True)
534 return (roots, match, (inc or exc or anypats) and True)
535
535
536 _hgexecutable = None
536 _hgexecutable = None
537
537
538 def hgexecutable():
538 def hgexecutable():
539 """return location of the 'hg' executable.
539 """return location of the 'hg' executable.
540
540
541 Defaults to $HG or 'hg' in the search path.
541 Defaults to $HG or 'hg' in the search path.
542 """
542 """
543 if _hgexecutable is None:
543 if _hgexecutable is None:
544 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
544 set_hgexecutable(os.environ.get('HG') or find_exe('hg', 'hg'))
545 return _hgexecutable
545 return _hgexecutable
546
546
547 def set_hgexecutable(path):
547 def set_hgexecutable(path):
548 """set location of the 'hg' executable"""
548 """set location of the 'hg' executable"""
549 global _hgexecutable
549 global _hgexecutable
550 _hgexecutable = path
550 _hgexecutable = path
551
551
552 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
552 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
553 '''enhanced shell command execution.
553 '''enhanced shell command execution.
554 run with environment maybe modified, maybe in different dir.
554 run with environment maybe modified, maybe in different dir.
555
555
556 if command fails and onerr is None, return status. if ui object,
556 if command fails and onerr is None, return status. if ui object,
557 print error message and return status, else raise onerr object as
557 print error message and return status, else raise onerr object as
558 exception.'''
558 exception.'''
559 def py2shell(val):
559 def py2shell(val):
560 'convert python object into string that is useful to shell'
560 'convert python object into string that is useful to shell'
561 if val in (None, False):
561 if val in (None, False):
562 return '0'
562 return '0'
563 if val == True:
563 if val == True:
564 return '1'
564 return '1'
565 return str(val)
565 return str(val)
566 oldenv = {}
566 oldenv = {}
567 for k in environ:
567 for k in environ:
568 oldenv[k] = os.environ.get(k)
568 oldenv[k] = os.environ.get(k)
569 if cwd is not None:
569 if cwd is not None:
570 oldcwd = os.getcwd()
570 oldcwd = os.getcwd()
571 origcmd = cmd
571 origcmd = cmd
572 if os.name == 'nt':
572 if os.name == 'nt':
573 cmd = '"%s"' % cmd
573 cmd = '"%s"' % cmd
574 try:
574 try:
575 for k, v in environ.iteritems():
575 for k, v in environ.iteritems():
576 os.environ[k] = py2shell(v)
576 os.environ[k] = py2shell(v)
577 os.environ['HG'] = hgexecutable()
577 os.environ['HG'] = hgexecutable()
578 if cwd is not None and oldcwd != cwd:
578 if cwd is not None and oldcwd != cwd:
579 os.chdir(cwd)
579 os.chdir(cwd)
580 rc = os.system(cmd)
580 rc = os.system(cmd)
581 if sys.platform == 'OpenVMS' and rc & 1:
581 if sys.platform == 'OpenVMS' and rc & 1:
582 rc = 0
582 rc = 0
583 if rc and onerr:
583 if rc and onerr:
584 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
584 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
585 explain_exit(rc)[0])
585 explain_exit(rc)[0])
586 if errprefix:
586 if errprefix:
587 errmsg = '%s: %s' % (errprefix, errmsg)
587 errmsg = '%s: %s' % (errprefix, errmsg)
588 try:
588 try:
589 onerr.warn(errmsg + '\n')
589 onerr.warn(errmsg + '\n')
590 except AttributeError:
590 except AttributeError:
591 raise onerr(errmsg)
591 raise onerr(errmsg)
592 return rc
592 return rc
593 finally:
593 finally:
594 for k, v in oldenv.iteritems():
594 for k, v in oldenv.iteritems():
595 if v is None:
595 if v is None:
596 del os.environ[k]
596 del os.environ[k]
597 else:
597 else:
598 os.environ[k] = v
598 os.environ[k] = v
599 if cwd is not None and oldcwd != cwd:
599 if cwd is not None and oldcwd != cwd:
600 os.chdir(oldcwd)
600 os.chdir(oldcwd)
601
601
602 # os.path.lexists is not available on python2.3
602 # os.path.lexists is not available on python2.3
603 def lexists(filename):
603 def lexists(filename):
604 "test whether a file with this name exists. does not follow symlinks"
604 "test whether a file with this name exists. does not follow symlinks"
605 try:
605 try:
606 os.lstat(filename)
606 os.lstat(filename)
607 except:
607 except:
608 return False
608 return False
609 return True
609 return True
610
610
611 def rename(src, dst):
611 def rename(src, dst):
612 """forcibly rename a file"""
612 """forcibly rename a file"""
613 try:
613 try:
614 os.rename(src, dst)
614 os.rename(src, dst)
615 except OSError, err: # FIXME: check err (EEXIST ?)
615 except OSError, err: # FIXME: check err (EEXIST ?)
616 # on windows, rename to existing file is not allowed, so we
616 # on windows, rename to existing file is not allowed, so we
617 # must delete destination first. but if file is open, unlink
617 # must delete destination first. but if file is open, unlink
618 # schedules it for delete but does not delete it. rename
618 # schedules it for delete but does not delete it. rename
619 # happens immediately even for open files, so we create
619 # happens immediately even for open files, so we create
620 # temporary file, delete it, rename destination to that name,
620 # temporary file, delete it, rename destination to that name,
621 # then delete that. then rename is safe to do.
621 # then delete that. then rename is safe to do.
622 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
622 fd, temp = tempfile.mkstemp(dir=os.path.dirname(dst) or '.')
623 os.close(fd)
623 os.close(fd)
624 os.unlink(temp)
624 os.unlink(temp)
625 os.rename(dst, temp)
625 os.rename(dst, temp)
626 os.unlink(temp)
626 os.unlink(temp)
627 os.rename(src, dst)
627 os.rename(src, dst)
628
628
629 def unlink(f):
629 def unlink(f):
630 """unlink and remove the directory if it is empty"""
630 """unlink and remove the directory if it is empty"""
631 os.unlink(f)
631 os.unlink(f)
632 # try removing directories that might now be empty
632 # try removing directories that might now be empty
633 try:
633 try:
634 os.removedirs(os.path.dirname(f))
634 os.removedirs(os.path.dirname(f))
635 except OSError:
635 except OSError:
636 pass
636 pass
637
637
638 def copyfile(src, dest):
638 def copyfile(src, dest):
639 "copy a file, preserving mode"
639 "copy a file, preserving mode"
640 if os.path.islink(src):
640 if os.path.islink(src):
641 try:
641 try:
642 os.unlink(dest)
642 os.unlink(dest)
643 except:
643 except:
644 pass
644 pass
645 os.symlink(os.readlink(src), dest)
645 os.symlink(os.readlink(src), dest)
646 else:
646 else:
647 try:
647 try:
648 shutil.copyfile(src, dest)
648 shutil.copyfile(src, dest)
649 shutil.copymode(src, dest)
649 shutil.copymode(src, dest)
650 except shutil.Error, inst:
650 except shutil.Error, inst:
651 raise Abort(str(inst))
651 raise Abort(str(inst))
652
652
653 def copyfiles(src, dst, hardlink=None):
653 def copyfiles(src, dst, hardlink=None):
654 """Copy a directory tree using hardlinks if possible"""
654 """Copy a directory tree using hardlinks if possible"""
655
655
656 if hardlink is None:
656 if hardlink is None:
657 hardlink = (os.stat(src).st_dev ==
657 hardlink = (os.stat(src).st_dev ==
658 os.stat(os.path.dirname(dst)).st_dev)
658 os.stat(os.path.dirname(dst)).st_dev)
659
659
660 if os.path.isdir(src):
660 if os.path.isdir(src):
661 os.mkdir(dst)
661 os.mkdir(dst)
662 for name, kind in osutil.listdir(src):
662 for name, kind in osutil.listdir(src):
663 srcname = os.path.join(src, name)
663 srcname = os.path.join(src, name)
664 dstname = os.path.join(dst, name)
664 dstname = os.path.join(dst, name)
665 copyfiles(srcname, dstname, hardlink)
665 copyfiles(srcname, dstname, hardlink)
666 else:
666 else:
667 if hardlink:
667 if hardlink:
668 try:
668 try:
669 os_link(src, dst)
669 os_link(src, dst)
670 except (IOError, OSError):
670 except (IOError, OSError):
671 hardlink = False
671 hardlink = False
672 shutil.copy(src, dst)
672 shutil.copy(src, dst)
673 else:
673 else:
674 shutil.copy(src, dst)
674 shutil.copy(src, dst)
675
675
676 class path_auditor(object):
676 class path_auditor(object):
677 '''ensure that a filesystem path contains no banned components.
677 '''ensure that a filesystem path contains no banned components.
678 the following properties of a path are checked:
678 the following properties of a path are checked:
679
679
680 - under top-level .hg
680 - under top-level .hg
681 - starts at the root of a windows drive
681 - starts at the root of a windows drive
682 - contains ".."
682 - contains ".."
683 - traverses a symlink (e.g. a/symlink_here/b)
683 - traverses a symlink (e.g. a/symlink_here/b)
684 - inside a nested repository'''
684 - inside a nested repository'''
685
685
686 def __init__(self, root):
686 def __init__(self, root):
687 self.audited = set()
687 self.audited = set()
688 self.auditeddir = set()
688 self.auditeddir = set()
689 self.root = root
689 self.root = root
690
690
691 def __call__(self, path):
691 def __call__(self, path):
692 if path in self.audited:
692 if path in self.audited:
693 return
693 return
694 normpath = os.path.normcase(path)
694 normpath = os.path.normcase(path)
695 parts = splitpath(normpath)
695 parts = splitpath(normpath)
696 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
696 if (os.path.splitdrive(path)[0] or parts[0] in ('.hg', '')
697 or os.pardir in parts):
697 or os.pardir in parts):
698 raise Abort(_("path contains illegal component: %s") % path)
698 raise Abort(_("path contains illegal component: %s") % path)
699 def check(prefix):
699 def check(prefix):
700 curpath = os.path.join(self.root, prefix)
700 curpath = os.path.join(self.root, prefix)
701 try:
701 try:
702 st = os.lstat(curpath)
702 st = os.lstat(curpath)
703 except OSError, err:
703 except OSError, err:
704 # EINVAL can be raised as invalid path syntax under win32.
704 # EINVAL can be raised as invalid path syntax under win32.
705 # They must be ignored for patterns can be checked too.
705 # They must be ignored for patterns can be checked too.
706 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
706 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
707 raise
707 raise
708 else:
708 else:
709 if stat.S_ISLNK(st.st_mode):
709 if stat.S_ISLNK(st.st_mode):
710 raise Abort(_('path %r traverses symbolic link %r') %
710 raise Abort(_('path %r traverses symbolic link %r') %
711 (path, prefix))
711 (path, prefix))
712 elif (stat.S_ISDIR(st.st_mode) and
712 elif (stat.S_ISDIR(st.st_mode) and
713 os.path.isdir(os.path.join(curpath, '.hg'))):
713 os.path.isdir(os.path.join(curpath, '.hg'))):
714 raise Abort(_('path %r is inside repo %r') %
714 raise Abort(_('path %r is inside repo %r') %
715 (path, prefix))
715 (path, prefix))
716 parts.pop()
716 parts.pop()
717 prefixes = []
717 prefixes = []
718 for n in range(len(parts)):
718 for n in range(len(parts)):
719 prefix = os.sep.join(parts)
719 prefix = os.sep.join(parts)
720 if prefix in self.auditeddir:
720 if prefix in self.auditeddir:
721 break
721 break
722 check(prefix)
722 check(prefix)
723 prefixes.append(prefix)
723 prefixes.append(prefix)
724 parts.pop()
724 parts.pop()
725
725
726 self.audited.add(path)
726 self.audited.add(path)
727 # only add prefixes to the cache after checking everything: we don't
727 # only add prefixes to the cache after checking everything: we don't
728 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
728 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
729 self.auditeddir.update(prefixes)
729 self.auditeddir.update(prefixes)
730
730
731 def _makelock_file(info, pathname):
731 def _makelock_file(info, pathname):
732 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
732 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
733 os.write(ld, info)
733 os.write(ld, info)
734 os.close(ld)
734 os.close(ld)
735
735
736 def _readlock_file(pathname):
736 def _readlock_file(pathname):
737 return posixfile(pathname).read()
737 return posixfile(pathname).read()
738
738
739 def nlinks(pathname):
739 def nlinks(pathname):
740 """Return number of hardlinks for the given file."""
740 """Return number of hardlinks for the given file."""
741 return os.lstat(pathname).st_nlink
741 return os.lstat(pathname).st_nlink
742
742
743 if hasattr(os, 'link'):
743 if hasattr(os, 'link'):
744 os_link = os.link
744 os_link = os.link
745 else:
745 else:
746 def os_link(src, dst):
746 def os_link(src, dst):
747 raise OSError(0, _("Hardlinks not supported"))
747 raise OSError(0, _("Hardlinks not supported"))
748
748
749 def fstat(fp):
749 def fstat(fp):
750 '''stat file object that may not have fileno method.'''
750 '''stat file object that may not have fileno method.'''
751 try:
751 try:
752 return os.fstat(fp.fileno())
752 return os.fstat(fp.fileno())
753 except AttributeError:
753 except AttributeError:
754 return os.stat(fp.name)
754 return os.stat(fp.name)
755
755
756 posixfile = file
756 posixfile = file
757
757
758 def openhardlinks():
758 def openhardlinks():
759 '''return true if it is safe to hold open file handles to hardlinks'''
759 '''return true if it is safe to hold open file handles to hardlinks'''
760 return True
760 return True
761
761
762 getuser_fallback = None
762 getuser_fallback = None
763
763
764 def getuser():
764 def getuser():
765 '''return name of current user'''
765 '''return name of current user'''
766 try:
766 try:
767 return getpass.getuser()
767 return getpass.getuser()
768 except ImportError:
768 except ImportError:
769 # import of pwd will fail on windows - try fallback
769 # import of pwd will fail on windows - try fallback
770 if getuser_fallback:
770 if getuser_fallback:
771 return getuser_fallback()
771 return getuser_fallback()
772 # raised if win32api not available
772 # raised if win32api not available
773 raise Abort(_('user name not available - set USERNAME '
773 raise Abort(_('user name not available - set USERNAME '
774 'environment variable'))
774 'environment variable'))
775
775
776 def username(uid=None):
776 def username(uid=None):
777 """Return the name of the user with the given uid.
777 """Return the name of the user with the given uid.
778
778
779 If uid is None, return the name of the current user."""
779 If uid is None, return the name of the current user."""
780 try:
780 try:
781 import pwd
781 import pwd
782 if uid is None:
782 if uid is None:
783 uid = os.getuid()
783 uid = os.getuid()
784 try:
784 try:
785 return pwd.getpwuid(uid)[0]
785 return pwd.getpwuid(uid)[0]
786 except KeyError:
786 except KeyError:
787 return str(uid)
787 return str(uid)
788 except ImportError:
788 except ImportError:
789 return None
789 return None
790
790
791 def groupname(gid=None):
791 def groupname(gid=None):
792 """Return the name of the group with the given gid.
792 """Return the name of the group with the given gid.
793
793
794 If gid is None, return the name of the current group."""
794 If gid is None, return the name of the current group."""
795 try:
795 try:
796 import grp
796 import grp
797 if gid is None:
797 if gid is None:
798 gid = os.getgid()
798 gid = os.getgid()
799 try:
799 try:
800 return grp.getgrgid(gid)[0]
800 return grp.getgrgid(gid)[0]
801 except KeyError:
801 except KeyError:
802 return str(gid)
802 return str(gid)
803 except ImportError:
803 except ImportError:
804 return None
804 return None
805
805
806 # File system features
806 # File system features
807
807
808 def checkfolding(path):
808 def checkfolding(path):
809 """
809 """
810 Check whether the given path is on a case-sensitive filesystem
810 Check whether the given path is on a case-sensitive filesystem
811
811
812 Requires a path (like /foo/.hg) ending with a foldable final
812 Requires a path (like /foo/.hg) ending with a foldable final
813 directory component.
813 directory component.
814 """
814 """
815 s1 = os.stat(path)
815 s1 = os.stat(path)
816 d, b = os.path.split(path)
816 d, b = os.path.split(path)
817 p2 = os.path.join(d, b.upper())
817 p2 = os.path.join(d, b.upper())
818 if path == p2:
818 if path == p2:
819 p2 = os.path.join(d, b.lower())
819 p2 = os.path.join(d, b.lower())
820 try:
820 try:
821 s2 = os.stat(p2)
821 s2 = os.stat(p2)
822 if s2 == s1:
822 if s2 == s1:
823 return False
823 return False
824 return True
824 return True
825 except:
825 except:
826 return True
826 return True
827
827
828 def checkexec(path):
828 def checkexec(path):
829 """
829 """
830 Check whether the given path is on a filesystem with UNIX-like exec flags
830 Check whether the given path is on a filesystem with UNIX-like exec flags
831
831
832 Requires a directory (like /foo/.hg)
832 Requires a directory (like /foo/.hg)
833 """
833 """
834
834
835 # VFAT on some Linux versions can flip mode but it doesn't persist
835 # VFAT on some Linux versions can flip mode but it doesn't persist
836 # a FS remount. Frequently we can detect it if files are created
836 # a FS remount. Frequently we can detect it if files are created
837 # with exec bit on.
837 # with exec bit on.
838
838
839 try:
839 try:
840 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
840 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
841 fh, fn = tempfile.mkstemp("", "", path)
841 fh, fn = tempfile.mkstemp("", "", path)
842 try:
842 try:
843 os.close(fh)
843 os.close(fh)
844 m = os.stat(fn).st_mode & 0777
844 m = os.stat(fn).st_mode & 0777
845 new_file_has_exec = m & EXECFLAGS
845 new_file_has_exec = m & EXECFLAGS
846 os.chmod(fn, m ^ EXECFLAGS)
846 os.chmod(fn, m ^ EXECFLAGS)
847 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
847 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
848 finally:
848 finally:
849 os.unlink(fn)
849 os.unlink(fn)
850 except (IOError, OSError):
850 except (IOError, OSError):
851 # we don't care, the user probably won't be able to commit anyway
851 # we don't care, the user probably won't be able to commit anyway
852 return False
852 return False
853 return not (new_file_has_exec or exec_flags_cannot_flip)
853 return not (new_file_has_exec or exec_flags_cannot_flip)
854
854
855 def execfunc(path, fallback):
855 def execfunc(path, fallback):
856 '''return an is_exec() function with default to fallback'''
856 '''return an is_exec() function with default to fallback'''
857 if checkexec(path):
857 if checkexec(path):
858 return lambda x: is_exec(os.path.join(path, x))
858 return lambda x: is_exec(os.path.join(path, x))
859 return fallback
859 return fallback
860
860
861 def checklink(path):
861 def checklink(path):
862 """check whether the given path is on a symlink-capable filesystem"""
862 """check whether the given path is on a symlink-capable filesystem"""
863 # mktemp is not racy because symlink creation will fail if the
863 # mktemp is not racy because symlink creation will fail if the
864 # file already exists
864 # file already exists
865 name = tempfile.mktemp(dir=path)
865 name = tempfile.mktemp(dir=path)
866 try:
866 try:
867 os.symlink(".", name)
867 os.symlink(".", name)
868 os.unlink(name)
868 os.unlink(name)
869 return True
869 return True
870 except (OSError, AttributeError):
870 except (OSError, AttributeError):
871 return False
871 return False
872
872
873 def linkfunc(path, fallback):
873 def linkfunc(path, fallback):
874 '''return an is_link() function with default to fallback'''
874 '''return an is_link() function with default to fallback'''
875 if checklink(path):
875 if checklink(path):
876 return lambda x: os.path.islink(os.path.join(path, x))
876 return lambda x: os.path.islink(os.path.join(path, x))
877 return fallback
877 return fallback
878
878
879 _umask = os.umask(0)
879 _umask = os.umask(0)
880 os.umask(_umask)
880 os.umask(_umask)
881
881
882 def needbinarypatch():
882 def needbinarypatch():
883 """return True if patches should be applied in binary mode by default."""
883 """return True if patches should be applied in binary mode by default."""
884 return os.name == 'nt'
884 return os.name == 'nt'
885
885
886 def endswithsep(path):
886 def endswithsep(path):
887 '''Check path ends with os.sep or os.altsep.'''
887 '''Check path ends with os.sep or os.altsep.'''
888 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
888 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
889
889
890 def splitpath(path):
890 def splitpath(path):
891 '''Split path by os.sep.
891 '''Split path by os.sep.
892 Note that this function does not use os.altsep because this is
892 Note that this function does not use os.altsep because this is
893 an alternative of simple "xxx.split(os.sep)".
893 an alternative of simple "xxx.split(os.sep)".
894 It is recommended to use os.path.normpath() before using this
894 It is recommended to use os.path.normpath() before using this
895 function if need.'''
895 function if need.'''
896 return path.split(os.sep)
896 return path.split(os.sep)
897
897
898 # Platform specific variants
898 # Platform specific variants
899 if os.name == 'nt':
899 if os.name == 'nt':
900 import msvcrt
900 import msvcrt
901 nulldev = 'NUL:'
901 nulldev = 'NUL:'
902
902
903 class winstdout:
903 class winstdout:
904 '''stdout on windows misbehaves if sent through a pipe'''
904 '''stdout on windows misbehaves if sent through a pipe'''
905
905
906 def __init__(self, fp):
906 def __init__(self, fp):
907 self.fp = fp
907 self.fp = fp
908
908
909 def __getattr__(self, key):
909 def __getattr__(self, key):
910 return getattr(self.fp, key)
910 return getattr(self.fp, key)
911
911
912 def close(self):
912 def close(self):
913 try:
913 try:
914 self.fp.close()
914 self.fp.close()
915 except: pass
915 except: pass
916
916
917 def write(self, s):
917 def write(self, s):
918 try:
918 try:
919 # This is workaround for "Not enough space" error on
919 # This is workaround for "Not enough space" error on
920 # writing large size of data to console.
920 # writing large size of data to console.
921 limit = 16000
921 limit = 16000
922 l = len(s)
922 l = len(s)
923 start = 0
923 start = 0
924 while start < l:
924 while start < l:
925 end = start + limit
925 end = start + limit
926 self.fp.write(s[start:end])
926 self.fp.write(s[start:end])
927 start = end
927 start = end
928 except IOError, inst:
928 except IOError, inst:
929 if inst.errno != 0: raise
929 if inst.errno != 0: raise
930 self.close()
930 self.close()
931 raise IOError(errno.EPIPE, 'Broken pipe')
931 raise IOError(errno.EPIPE, 'Broken pipe')
932
932
933 def flush(self):
933 def flush(self):
934 try:
934 try:
935 return self.fp.flush()
935 return self.fp.flush()
936 except IOError, inst:
936 except IOError, inst:
937 if inst.errno != errno.EINVAL: raise
937 if inst.errno != errno.EINVAL: raise
938 self.close()
938 self.close()
939 raise IOError(errno.EPIPE, 'Broken pipe')
939 raise IOError(errno.EPIPE, 'Broken pipe')
940
940
941 sys.stdout = winstdout(sys.stdout)
941 sys.stdout = winstdout(sys.stdout)
942
942
943 def _is_win_9x():
943 def _is_win_9x():
944 '''return true if run on windows 95, 98 or me.'''
944 '''return true if run on windows 95, 98 or me.'''
945 try:
945 try:
946 return sys.getwindowsversion()[3] == 1
946 return sys.getwindowsversion()[3] == 1
947 except AttributeError:
947 except AttributeError:
948 return 'command' in os.environ.get('comspec', '')
948 return 'command' in os.environ.get('comspec', '')
949
949
950 def openhardlinks():
950 def openhardlinks():
951 return not _is_win_9x and "win32api" in locals()
951 return not _is_win_9x and "win32api" in locals()
952
952
953 def system_rcpath():
953 def system_rcpath():
954 try:
954 try:
955 return system_rcpath_win32()
955 return system_rcpath_win32()
956 except:
956 except:
957 return [r'c:\mercurial\mercurial.ini']
957 return [r'c:\mercurial\mercurial.ini']
958
958
959 def user_rcpath():
959 def user_rcpath():
960 '''return os-specific hgrc search path to the user dir'''
960 '''return os-specific hgrc search path to the user dir'''
961 try:
961 try:
962 userrc = user_rcpath_win32()
962 userrc = user_rcpath_win32()
963 except:
963 except:
964 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
964 userrc = os.path.join(os.path.expanduser('~'), 'mercurial.ini')
965 path = [userrc]
965 path = [userrc]
966 userprofile = os.environ.get('USERPROFILE')
966 userprofile = os.environ.get('USERPROFILE')
967 if userprofile:
967 if userprofile:
968 path.append(os.path.join(userprofile, 'mercurial.ini'))
968 path.append(os.path.join(userprofile, 'mercurial.ini'))
969 return path
969 return path
970
970
971 def parse_patch_output(output_line):
971 def parse_patch_output(output_line):
972 """parses the output produced by patch and returns the file name"""
972 """parses the output produced by patch and returns the file name"""
973 pf = output_line[14:]
973 pf = output_line[14:]
974 if pf[0] == '`':
974 if pf[0] == '`':
975 pf = pf[1:-1] # Remove the quotes
975 pf = pf[1:-1] # Remove the quotes
976 return pf
976 return pf
977
977
978 def sshargs(sshcmd, host, user, port):
978 def sshargs(sshcmd, host, user, port):
979 '''Build argument list for ssh or Plink'''
979 '''Build argument list for ssh or Plink'''
980 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
980 pflag = 'plink' in sshcmd.lower() and '-P' or '-p'
981 args = user and ("%s@%s" % (user, host)) or host
981 args = user and ("%s@%s" % (user, host)) or host
982 return port and ("%s %s %s" % (args, pflag, port)) or args
982 return port and ("%s %s %s" % (args, pflag, port)) or args
983
983
984 def testpid(pid):
984 def testpid(pid):
985 '''return False if pid dead, True if running or not known'''
985 '''return False if pid dead, True if running or not known'''
986 return True
986 return True
987
987
988 def set_flags(f, flags):
988 def set_flags(f, flags):
989 pass
989 pass
990
990
991 def set_binary(fd):
991 def set_binary(fd):
992 msvcrt.setmode(fd.fileno(), os.O_BINARY)
992 msvcrt.setmode(fd.fileno(), os.O_BINARY)
993
993
994 def pconvert(path):
994 def pconvert(path):
995 return '/'.join(splitpath(path))
995 return '/'.join(splitpath(path))
996
996
997 def localpath(path):
997 def localpath(path):
998 return path.replace('/', '\\')
998 return path.replace('/', '\\')
999
999
1000 def normpath(path):
1000 def normpath(path):
1001 return pconvert(os.path.normpath(path))
1001 return pconvert(os.path.normpath(path))
1002
1002
1003 makelock = _makelock_file
1003 makelock = _makelock_file
1004 readlock = _readlock_file
1004 readlock = _readlock_file
1005
1005
1006 def samestat(s1, s2):
1006 def samestat(s1, s2):
1007 return False
1007 return False
1008
1008
1009 # A sequence of backslashes is special iff it precedes a double quote:
1009 # A sequence of backslashes is special iff it precedes a double quote:
1010 # - if there's an even number of backslashes, the double quote is not
1010 # - if there's an even number of backslashes, the double quote is not
1011 # quoted (i.e. it ends the quoted region)
1011 # quoted (i.e. it ends the quoted region)
1012 # - if there's an odd number of backslashes, the double quote is quoted
1012 # - if there's an odd number of backslashes, the double quote is quoted
1013 # - in both cases, every pair of backslashes is unquoted into a single
1013 # - in both cases, every pair of backslashes is unquoted into a single
1014 # backslash
1014 # backslash
1015 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1015 # (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx )
1016 # So, to quote a string, we must surround it in double quotes, double
1016 # So, to quote a string, we must surround it in double quotes, double
1017 # the number of backslashes that preceed double quotes and add another
1017 # the number of backslashes that preceed double quotes and add another
1018 # backslash before every double quote (being careful with the double
1018 # backslash before every double quote (being careful with the double
1019 # quote we've appended to the end)
1019 # quote we've appended to the end)
1020 _quotere = None
1020 _quotere = None
1021 def shellquote(s):
1021 def shellquote(s):
1022 global _quotere
1022 global _quotere
1023 if _quotere is None:
1023 if _quotere is None:
1024 _quotere = re.compile(r'(\\*)("|\\$)')
1024 _quotere = re.compile(r'(\\*)("|\\$)')
1025 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1025 return '"%s"' % _quotere.sub(r'\1\1\\\2', s)
1026
1026
1027 def quotecommand(cmd):
1027 def quotecommand(cmd):
1028 """Build a command string suitable for os.popen* calls."""
1028 """Build a command string suitable for os.popen* calls."""
1029 # The extra quotes are needed because popen* runs the command
1029 # The extra quotes are needed because popen* runs the command
1030 # through the current COMSPEC. cmd.exe suppress enclosing quotes.
1030 # through the current COMSPEC. cmd.exe suppress enclosing quotes.
1031 return '"' + cmd + '"'
1031 return '"' + cmd + '"'
1032
1032
1033 def popen(command):
1033 def popen(command):
1034 # Work around "popen spawned process may not write to stdout
1034 # Work around "popen spawned process may not write to stdout
1035 # under windows"
1035 # under windows"
1036 # http://bugs.python.org/issue1366
1036 # http://bugs.python.org/issue1366
1037 command += " 2> %s" % nulldev
1037 command += " 2> %s" % nulldev
1038 return os.popen(quotecommand(command))
1038 return os.popen(quotecommand(command))
1039
1039
1040 def explain_exit(code):
1040 def explain_exit(code):
1041 return _("exited with status %d") % code, code
1041 return _("exited with status %d") % code, code
1042
1042
1043 # if you change this stub into a real check, please try to implement the
1043 # if you change this stub into a real check, please try to implement the
1044 # username and groupname functions above, too.
1044 # username and groupname functions above, too.
1045 def isowner(fp, st=None):
1045 def isowner(fp, st=None):
1046 return True
1046 return True
1047
1047
1048 def find_in_path(name, path, default=None):
1048 def find_in_path(name, path, default=None):
1049 '''find name in search path. path can be string (will be split
1049 '''find name in search path. path can be string (will be split
1050 with os.pathsep), or iterable thing that returns strings. if name
1050 with os.pathsep), or iterable thing that returns strings. if name
1051 found, return path to name. else return default. name is looked up
1051 found, return path to name. else return default. name is looked up
1052 using cmd.exe rules, using PATHEXT.'''
1052 using cmd.exe rules, using PATHEXT.'''
1053 if isinstance(path, str):
1053 if isinstance(path, str):
1054 path = path.split(os.pathsep)
1054 path = path.split(os.pathsep)
1055
1055
1056 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1056 pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD')
1057 pathext = pathext.lower().split(os.pathsep)
1057 pathext = pathext.lower().split(os.pathsep)
1058 isexec = os.path.splitext(name)[1].lower() in pathext
1058 isexec = os.path.splitext(name)[1].lower() in pathext
1059
1059
1060 for p in path:
1060 for p in path:
1061 p_name = os.path.join(p, name)
1061 p_name = os.path.join(p, name)
1062
1062
1063 if isexec and os.path.exists(p_name):
1063 if isexec and os.path.exists(p_name):
1064 return p_name
1064 return p_name
1065
1065
1066 for ext in pathext:
1066 for ext in pathext:
1067 p_name_ext = p_name + ext
1067 p_name_ext = p_name + ext
1068 if os.path.exists(p_name_ext):
1068 if os.path.exists(p_name_ext):
1069 return p_name_ext
1069 return p_name_ext
1070 return default
1070 return default
1071
1071
1072 def set_signal_handler():
1072 def set_signal_handler():
1073 try:
1073 try:
1074 set_signal_handler_win32()
1074 set_signal_handler_win32()
1075 except NameError:
1075 except NameError:
1076 pass
1076 pass
1077
1077
1078 try:
1078 try:
1079 # override functions with win32 versions if possible
1079 # override functions with win32 versions if possible
1080 from util_win32 import *
1080 from util_win32 import *
1081 if not _is_win_9x():
1081 if not _is_win_9x():
1082 posixfile = posixfile_nt
1082 posixfile = posixfile_nt
1083 except ImportError:
1083 except ImportError:
1084 pass
1084 pass
1085
1085
1086 else:
1086 else:
1087 nulldev = '/dev/null'
1087 nulldev = '/dev/null'
1088
1088
1089 def rcfiles(path):
1089 def rcfiles(path):
1090 rcs = [os.path.join(path, 'hgrc')]
1090 rcs = [os.path.join(path, 'hgrc')]
1091 rcdir = os.path.join(path, 'hgrc.d')
1091 rcdir = os.path.join(path, 'hgrc.d')
1092 try:
1092 try:
1093 rcs.extend([os.path.join(rcdir, f)
1093 rcs.extend([os.path.join(rcdir, f)
1094 for f, kind in osutil.listdir(rcdir)
1094 for f, kind in osutil.listdir(rcdir)
1095 if f.endswith(".rc")])
1095 if f.endswith(".rc")])
1096 except OSError:
1096 except OSError:
1097 pass
1097 pass
1098 return rcs
1098 return rcs
1099
1099
1100 def system_rcpath():
1100 def system_rcpath():
1101 path = []
1101 path = []
1102 # old mod_python does not set sys.argv
1102 # old mod_python does not set sys.argv
1103 if len(getattr(sys, 'argv', [])) > 0:
1103 if len(getattr(sys, 'argv', [])) > 0:
1104 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1104 path.extend(rcfiles(os.path.dirname(sys.argv[0]) +
1105 '/../etc/mercurial'))
1105 '/../etc/mercurial'))
1106 path.extend(rcfiles('/etc/mercurial'))
1106 path.extend(rcfiles('/etc/mercurial'))
1107 return path
1107 return path
1108
1108
1109 def user_rcpath():
1109 def user_rcpath():
1110 return [os.path.expanduser('~/.hgrc')]
1110 return [os.path.expanduser('~/.hgrc')]
1111
1111
1112 def parse_patch_output(output_line):
1112 def parse_patch_output(output_line):
1113 """parses the output produced by patch and returns the file name"""
1113 """parses the output produced by patch and returns the file name"""
1114 pf = output_line[14:]
1114 pf = output_line[14:]
1115 if os.sys.platform == 'OpenVMS':
1115 if os.sys.platform == 'OpenVMS':
1116 if pf[0] == '`':
1116 if pf[0] == '`':
1117 pf = pf[1:-1] # Remove the quotes
1117 pf = pf[1:-1] # Remove the quotes
1118 else:
1118 else:
1119 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1119 if pf.startswith("'") and pf.endswith("'") and " " in pf:
1120 pf = pf[1:-1] # Remove the quotes
1120 pf = pf[1:-1] # Remove the quotes
1121 return pf
1121 return pf
1122
1122
1123 def sshargs(sshcmd, host, user, port):
1123 def sshargs(sshcmd, host, user, port):
1124 '''Build argument list for ssh'''
1124 '''Build argument list for ssh'''
1125 args = user and ("%s@%s" % (user, host)) or host
1125 args = user and ("%s@%s" % (user, host)) or host
1126 return port and ("%s -p %s" % (args, port)) or args
1126 return port and ("%s -p %s" % (args, port)) or args
1127
1127
1128 def is_exec(f):
1128 def is_exec(f):
1129 """check whether a file is executable"""
1129 """check whether a file is executable"""
1130 return (os.lstat(f).st_mode & 0100 != 0)
1130 return (os.lstat(f).st_mode & 0100 != 0)
1131
1131
1132 def set_flags(f, flags):
1132 def set_flags(f, flags):
1133 s = os.lstat(f).st_mode
1133 s = os.lstat(f).st_mode
1134 x = "x" in flags
1134 x = "x" in flags
1135 l = "l" in flags
1135 l = "l" in flags
1136 if l:
1136 if l:
1137 if not stat.S_ISLNK(s):
1137 if not stat.S_ISLNK(s):
1138 # switch file to link
1138 # switch file to link
1139 data = file(f).read()
1139 data = file(f).read()
1140 os.unlink(f)
1140 os.unlink(f)
1141 os.symlink(data, f)
1141 os.symlink(data, f)
1142 # no chmod needed at this point
1142 # no chmod needed at this point
1143 return
1143 return
1144 if stat.S_ISLNK(s):
1144 if stat.S_ISLNK(s):
1145 # switch link to file
1145 # switch link to file
1146 data = os.readlink(f)
1146 data = os.readlink(f)
1147 os.unlink(f)
1147 os.unlink(f)
1148 file(f, "w").write(data)
1148 file(f, "w").write(data)
1149 s = 0666 & ~_umask # avoid restatting for chmod
1149 s = 0666 & ~_umask # avoid restatting for chmod
1150
1150
1151 sx = s & 0100
1151 sx = s & 0100
1152 if x and not sx:
1152 if x and not sx:
1153 # Turn on +x for every +r bit when making a file executable
1153 # Turn on +x for every +r bit when making a file executable
1154 # and obey umask.
1154 # and obey umask.
1155 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1155 os.chmod(f, s | (s & 0444) >> 2 & ~_umask)
1156 elif not x and sx:
1156 elif not x and sx:
1157 # Turn off all +x bits
1157 # Turn off all +x bits
1158 os.chmod(f, s & 0666)
1158 os.chmod(f, s & 0666)
1159
1159
1160 def set_binary(fd):
1160 def set_binary(fd):
1161 pass
1161 pass
1162
1162
1163 def pconvert(path):
1163 def pconvert(path):
1164 return path
1164 return path
1165
1165
1166 def localpath(path):
1166 def localpath(path):
1167 return path
1167 return path
1168
1168
1169 normpath = os.path.normpath
1169 normpath = os.path.normpath
1170 samestat = os.path.samestat
1170 samestat = os.path.samestat
1171
1171
1172 def makelock(info, pathname):
1172 def makelock(info, pathname):
1173 try:
1173 try:
1174 os.symlink(info, pathname)
1174 os.symlink(info, pathname)
1175 except OSError, why:
1175 except OSError, why:
1176 if why.errno == errno.EEXIST:
1176 if why.errno == errno.EEXIST:
1177 raise
1177 raise
1178 else:
1178 else:
1179 _makelock_file(info, pathname)
1179 _makelock_file(info, pathname)
1180
1180
1181 def readlock(pathname):
1181 def readlock(pathname):
1182 try:
1182 try:
1183 return os.readlink(pathname)
1183 return os.readlink(pathname)
1184 except OSError, why:
1184 except OSError, why:
1185 if why.errno in (errno.EINVAL, errno.ENOSYS):
1185 if why.errno in (errno.EINVAL, errno.ENOSYS):
1186 return _readlock_file(pathname)
1186 return _readlock_file(pathname)
1187 else:
1187 else:
1188 raise
1188 raise
1189

def shellquote(s):
    if os.sys.platform == 'OpenVMS':
        return '"%s"' % s
    else:
        return "'%s'" % s.replace("'", "'\\''")

def quotecommand(cmd):
    return cmd

def popen(command):
    return os.popen(command)

def testpid(pid):
    '''return False if pid dead, True if running or not sure'''
    if os.sys.platform == 'OpenVMS':
        return True
    try:
        os.kill(pid, 0)
        return True
    except OSError, inst:
        return inst.errno != errno.ESRCH

def explain_exit(code):
    """return a 2-tuple (desc, code) describing a process's status"""
    if os.WIFEXITED(code):
        val = os.WEXITSTATUS(code)
        return _("exited with status %d") % val, val
    elif os.WIFSIGNALED(code):
        val = os.WTERMSIG(code)
        return _("killed by signal %d") % val, val
    elif os.WIFSTOPPED(code):
        val = os.WSTOPSIG(code)
        return _("stopped by signal %d") % val, val
    raise ValueError(_("invalid exit code"))

def isowner(fp, st=None):
    """Return True if the file object f belongs to the current user.

    The return value of a util.fstat(f) may be passed as the st argument.
    """
    if st is None:
        st = fstat(fp)
    return st.st_uid == os.getuid()

def find_in_path(name, path, default=None):
    '''find name in search path. path can be string (will be split
    with os.pathsep), or iterable thing that returns strings. if name
    found, return path to name. else return default.'''
    if isinstance(path, str):
        path = path.split(os.pathsep)
    for p in path:
        p_name = os.path.join(p, name)
        if os.path.exists(p_name):
            return p_name
    return default

def set_signal_handler():
    pass

def find_exe(name, default=None):
    '''find path of an executable.
    if name contains a path component, return it as is. otherwise,
    use normal executable search path.'''

    if os.sep in name or sys.platform == 'OpenVMS':
        # don't check the executable bit. if the file isn't
        # executable, whoever tries to actually run it will give a
        # much more useful error message.
        return name
    return find_in_path(name, os.environ.get('PATH', ''), default=default)

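The helpers above appear to be the POSIX variants of the platform-specific utilities (shellquote, popen, testpid, explain_exit and friends). As a hedged illustration of the quoting and exit-code behaviour, assuming an installed Mercurial that contains this revision and the default C locale:

    >>> from mercurial import util
    >>> util.shellquote("file name")
    "'file name'"
    >>> util.explain_exit(0)
    ('exited with status 0', 0)
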
def _buildencodefun():
    e = '_'
    win_reserved = [ord(x) for x in '\\:*?"<>|']
    cmap = dict([ (chr(x), chr(x)) for x in xrange(127) ])
    for x in (range(32) + range(126, 256) + win_reserved):
        cmap[chr(x)] = "~%02x" % x
    for x in range(ord("A"), ord("Z")+1) + [ord(e)]:
        cmap[chr(x)] = e + chr(x).lower()
    dmap = {}
    for k, v in cmap.iteritems():
        dmap[v] = k
    def decode(s):
        i = 0
        while i < len(s):
            for l in xrange(1, 4):
                try:
                    yield dmap[s[i:i+l]]
                    i += l
                    break
                except KeyError:
                    pass
            else:
                raise KeyError
    return (lambda s: "".join([cmap[c] for c in s]),
            lambda s: "".join(list(decode(s))))

encodefilename, decodefilename = _buildencodefun()

def encodedopener(openerfn, fn):
    def o(path, *args, **kw):
        return openerfn(fn(path), *args, **kw)
    return o

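_buildencodefun constructs a reversible mapping that makes store filenames safe on case-insensitive filesystems and around Windows-reserved characters: uppercase letters and '_' gain a '_' prefix, while control and reserved bytes become '~xx' hex escapes. A small sketch of the round trip, traced from the table-building code above rather than taken from a recorded session:

    >>> from mercurial import util
    >>> util.encodefilename('data/FOO:bar.i')
    'data/_f_o_o~3abar.i'
    >>> util.decodefilename('data/_f_o_o~3abar.i')
    'data/FOO:bar.i'
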
def mktempcopy(name, emptyok=False):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = 0666 & ~_umask
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        try: os.unlink(temp)
        except: pass
        raise
    return temp

class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode):
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode))
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)

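mktempcopy and atomictempfile lean on posixfile, rename and localpath defined elsewhere in this module. A minimal usage sketch with a made-up target path: writes land in a temporary copy next to the file, and rename() publishes them in one step, while discarding the object without calling rename() throws the changes away:

    from mercurial import util

    f = util.atomictempfile('some/dir/data.txt', 'w')   # hypothetical path
    f.write('new contents\n')                           # written to the temp copy
    f.rename()                                          # close and rename over the original
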
class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always

    def __getattr__(self, name):
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        if mode[0] != "r":
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    os.makedirs(d)
            if atomictemp:
                return atomictempfile(f, mode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        return posixfile(f, mode)

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()

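The opener resolves every path relative to its base directory, audits it unless audit=False, appends 'b' to the mode unless text=True, and can hand back an atomictempfile for atomic writes. A hedged sketch with an assumed base directory:

    from mercurial import util

    o = util.opener('/path/to/repo/.hg')            # hypothetical base
    f = o('store/data.i', 'w', atomictemp=True)     # audited, joined with the base
    f.write('some bytes')
    f.rename()                                      # atomictempfile makes the write visible
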
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

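chunkbuffer turns an iterator of arbitrarily sized string chunks into something that supports fixed-size read() calls. A small example, traced from the read() logic above rather than from a recorded session:

    >>> from mercurial import util
    >>> cb = util.chunkbuffer(['abc', 'defg', 'hi'])
    >>> cb.read(4)
    'abcd'
    >>> cb.read(10)      # shorter than requested once the iterator runs dry
    'efghi'
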
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y', timezone=True, timezone_format=" %+03d%02d"):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    s = time.strftime(format, time.gmtime(float(t) - tz))
    if timezone:
        s += timezone_format % (-tz / 3600, ((-tz % 3600) / 60))
    return s

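datestr renders the (unixtime, offset) pairs Mercurial uses internally, where offset is the zone's distance from UTC in seconds, positive west of UTC. Two deterministic examples traced from the formatting code above:

    >>> from mercurial import util
    >>> util.datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> util.datestr((0, -3600))     # one hour east of UTC
    'Thu Jan 01 01:00:00 1970 +0100'
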
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            tz = int(tz)
            offset = - 3600 * (tz / 100) - 60 * (tz % 100)
            return offset
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(string, formats=None, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    The date may be a "unixtime offset" string or in one of the specified
    formats."""
    if not string:
        return 0, 0
    if not formats:
        formats = defaultdateformats
    string = string.strip()
    try:
        when, offset = map(int, string.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                elif part[0] in "dm":
                    defaults[part] = "1"
                else:
                    defaults[part] = datestr(now, "%" + part[0], False)

        for format in formats:
            try:
                when, offset = strdate(string, format, defaults)
            except ValueError:
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % string)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

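parsedate accepts either the internal "unixtime offset" form or any of the formats passed in (defaultdateformats is defined earlier in the module), then applies the 32-bit and timezone-range checks above. The only fully deterministic path to show here is the "unixtime offset" one:

    >>> from mercurial import util
    >>> util.parsedate('1183772891 -7200')
    (1183772891, -7200)

Strings that go through strdate get their offset from the string itself or, failing that, from the local timezone, so those results depend on the machine running the call.
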
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        return parsedate(date, extendeddateformats)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

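matchdate compiles a date specifier into a predicate over unixtime values; '-N' means "within the last N days". A hedged sketch using only the relative form, which does not depend on extendeddateformats:

    >>> from mercurial import util
    >>> recent = util.matchdate('-7')
    >>> recent(util.makedate()[0])    # "now" falls inside the last seven days
    True
    >>> recent(0)                     # the epoch is well outside the window
    False
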
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

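shortuser trims a committer string down to a short handle by cutting at '@', '<', space and '.' in turn:

    >>> from mercurial import util
    >>> util.shortuser('Foo Bar <foo.bar@example.com>')
    'foo'
    >>> util.shortuser('baz@example.com')
    'baz'
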
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err

    for root, dirs, files in os.walk(path, onerror=errhandler):
        for d in dirs:
            if d == '.hg':
                yield root
                dirs[:] = []
                break

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

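rcpath computes the hgrc search path once per process and caches it in _rcpath, so HGRCPATH has to be set before the first call. A hedged sketch with made-up paths; directories contribute their *.rc files, plain entries are used verbatim, and an empty HGRCPATH leaves only the per-repository .hg/hgrc:

    import os
    from mercurial import util

    # hypothetical values; must be set before rcpath() is first called
    os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/hgrc.d', '/home/me/.hgrc'])
    util.rcpath()   # -> ['/etc/mercurial/hgrc.d/<each>.rc', ..., '/home/me/.hgrc']
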
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

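bytecount walks the unit table from GB down and picks the first entry whose threshold the value reaches, which keeps roughly three significant digits. Traced examples:

    >>> from mercurial import util
    >>> util.bytecount(100)
    '100 bytes'
    >>> util.bytecount(2252)
    '2.20 KB'
    >>> util.bytecount(10 * (1 << 20))
    '10.0 MB'
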
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def hidepassword(url):
    '''hide user credential in a url string'''
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc)
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))

def removeauth(url):
    '''remove all authentication information from a url string'''
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
    netloc = netloc[netloc.find('@')+1:]
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
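
hidepassword masks only the password portion of a URL's credentials, while removeauth drops the user information entirely. Traced from the urlparse-based code above:

    >>> from mercurial import util
    >>> util.hidepassword('http://user:secret@example.com/repo')
    'http://user:***@example.com/repo'
    >>> util.removeauth('http://user:secret@example.com/repo')
    'http://example.com/repo'
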
@@ -1,73 +1,95 @@
#!/bin/sh

"$TESTDIR/hghave" cvs cvsps || exit 80

cvscall()
{
    cvs -f $@
}

echo "[extensions]" >> $HGRCPATH
echo "convert = " >> $HGRCPATH

echo % create cvs repository
mkdir cvsrepo
cd cvsrepo
export CVSROOT=`pwd`
export CVS_OPTIONS=-f
cd ..

cvscall -q -d "$CVSROOT" init

echo % create source directory
mkdir src-temp
cd src-temp
echo a > a
mkdir b
cd b
echo c > c
cd ..

echo % import source directory
cvscall -q import -m import src INITIAL start
cd ..

echo % checkout source directory
cvscall -q checkout src

echo % commit a new revision changing b/c
cd src
echo c >> b/c
cvscall -q commit -mci0 . | grep '<--' |\
    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
cd ..

echo % convert fresh repo
hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
cat src-hg/a
cat src-hg/b/c

echo % convert fresh repo with --filemap
echo include b/c > filemap
hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
cat src-hg/b/c
hg -R src-filemap log --template '#rev# #desc# files: #files#\n'

echo % commit new file revisions
cd src
echo a >> a
echo c >> b/c
cvscall -q commit -mci1 . | grep '<--' |\
    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
cd ..

echo % convert again
hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
cat src-hg/a
cat src-hg/b/c

echo % convert again with --filemap
hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
cat src-hg/b/c
hg -R src-filemap log --template '#rev# #desc# files: #files#\n'

echo % commit branch
cd src
cvs -q update -r1.1 b/c
cvs -q tag -b branch
cvs -q update -r branch
echo d >> b/c
cvs -q commit -mci2 . | grep '<--' |\
    sed -e 's:.*src/\(.*\),v.*:checking in src/\1,v:g'
cd ..

echo % convert again
hg convert src src-hg | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
cat src-hg/a
cat src-hg/b/c

echo % convert again with --filemap
hg convert --filemap filemap src src-filemap | sed -e 's/connecting to.*cvsrepo/connecting to cvsrepo/g'
cat src-hg/b/c
hg -R src-filemap log --template '#rev# #desc# files: #files#\n'

echo "graphlog = " >> $HGRCPATH
hg -R src-hg glog --template '#rev# (#branches#) #desc# files: #files#\n'
@@ -1,69 +1,109 @@
% create cvs repository
% create source directory
% import source directory
N src/a
N src/b/c

No conflicts created by this import

% checkout source directory
U src/a
U src/b/c
% commit a new revision changing b/c
checking in src/b/c,v
% convert fresh repo
initializing destination src-hg repository
connecting to cvsrepo
scanning source...
sorting...
converting...
2 Initial revision
1 import
0 ci0
updating tags
a
c
c
% convert fresh repo with --filemap
initializing destination src-filemap repository
connecting to cvsrepo
scanning source...
sorting...
converting...
2 Initial revision
1 import
rolling back last transaction
0 ci0
updating tags
c
c
2 update tags files: .hgtags
1 ci0 files: b/c
0 Initial revision files: b/c
% commit new file revisions
checking in src/a,v
checking in src/b/c,v
% convert again
connecting to cvsrepo
scanning source...
sorting...
converting...
0 ci1
a
a
c
c
c
% convert again with --filemap
connecting to cvsrepo
scanning source...
sorting...
converting...
0 ci1
c
c
c
3 ci1 files: b/c
2 update tags files: .hgtags
1 ci0 files: b/c
0 Initial revision files: b/c
% commit branch
U b/c
T a
T b/c
checking in src/b/c,v
% convert again
connecting to cvsrepo
scanning source...
sorting...
converting...
0 ci2
a
a
c
d
% convert again with --filemap
connecting to cvsrepo
scanning source...
sorting...
converting...
0 ci2
c
d
4 ci2 files: b/c
3 ci1 files: b/c
2 update tags files: .hgtags
1 ci0 files: b/c
0 Initial revision files: b/c
o 5 (branch) ci2 files: b/c
|
| o 4 () ci1 files: a b/c
| |
| o 3 () update tags files: .hgtags
| |
| o 2 () ci0 files: b/c
|/
| o 1 (INITIAL) import files:
|/
o 0 () Initial revision files: a b/c
