##// END OF EJS Templates
i18n: mark strings for translation in convert extension
Martin Geisler -
r6956:12472a24 default
parent child Browse files
Show More
@@ -1,349 +1,349 b''
1 1 # common code for the convert extension
2 2 import base64, errno
3 3 import os
4 4 import cPickle as pickle
5 5 from mercurial import util
6 6 from mercurial.i18n import _
7 7
def encodeargs(args):
    """Pickle *args* and return it encoded as a single-line base64 string."""
    def strip_newlines(text):
        # base64.encodestring wraps its output at 76 columns; drop every
        # newline so the result fits on one line.  splitlines() maps '\n'
        # to [''] and any other character c to [c], so joining the heads
        # removes exactly the line breaks.
        return ''.join(ch.splitlines()[0] for ch in text)

    return strip_newlines(base64.encodestring(pickle.dumps(args)))
16 16
def decodeargs(s):
    """Inverse of encodeargs(): base64-decode *s*, then unpickle it."""
    return pickle.loads(base64.decodestring(s))
20 20
21 21 class MissingTool(Exception): pass
22 22
def checktool(exe, name=None, abort=True):
    """Verify that the external program *exe* can be found on the path.

    name  - human-readable tool name used in the error message
            (defaults to *exe*)
    abort - if true, raise util.Abort on failure; otherwise raise
            MissingTool so the caller can fall back to another converter.
    """
    name = name or exe
    if not util.find_exe(exe):
        # pick the exception class with the py2.4-era and/or idiom
        exc = abort and util.Abort or MissingTool
        raise exc(_('cannot find required "%s" tool') % name)
28 28
class NoRepo(Exception):
    """Raised when a path does not look like a repository of the expected
    type; convertsource()/convertsink() catch it and try the next converter."""
    pass

# sentinel revision id stored in the revision map for source revisions
# that were deliberately not converted
SKIPREV = 'SKIP'
32 32
class commit(object):
    """Lightweight container describing one changeset of the source
    repository.

    author  - author string ('unknown' when the source has none)
    date    - 'timestamp tz-offset' string ('0 0' when unknown)
    desc    - commit message
    parents - list of parent revision ids
    branch  - branch name, or None
    rev     - source revision id, or None
    extra   - dict of extra sink metadata (a fresh dict per commit)
    """
    def __init__(self, author, date, desc, parents, branch=None, rev=None,
                 extra=None):
        self.author = author or 'unknown'
        self.date = date or '0 0'
        self.desc = desc
        self.parents = parents
        self.branch = branch
        self.rev = rev
        # the original signature used the mutable default `extra={}`,
        # which is shared between every commit created without an explicit
        # extra dict; use a None sentinel so each instance gets its own
        if extra is None:
            extra = {}
        self.extra = extra
43 43
class converter_source(object):
    """Conversion source interface"""

    def __init__(self, ui, path=None, rev=None):
        """Initialize conversion source (or raise NoRepo("message")
        exception if path is not a valid repository)"""
        self.ui = ui
        self.path = path
        self.rev = rev

        # default charset assumed for source data; subclasses may override
        # (recode() uses it when no explicit encoding is given)
        self.encoding = 'utf-8'

    def before(self):
        # hook called before the conversion starts; optional for subclasses
        pass

    def after(self):
        # hook called when the conversion is finished, even on error
        pass

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""
        pass

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError()

    def getfile(self, name, rev):
        """Return file contents as a string"""
        raise NotImplementedError()

    def getmode(self, name, rev):
        """Return file mode, eg. '', 'x', or 'l'"""
        raise NotImplementedError()

    def getchanges(self, version):
        """Returns a tuple of (files, copies)
        Files is a sorted list of (filename, id) tuples for all files changed
        in version, where id is the source revision id of the file.

        copies is a dictionary of dest: source
        """
        raise NotImplementedError()

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError()

    def gettags(self):
        """Return the tags as a dictionary of name: revision"""
        raise NotImplementedError()

    def recode(self, s, encoding=None):
        """Best-effort conversion of *s* to a UTF-8 byte string.

        Tries *encoding* (default self.encoding), then latin-1, then
        *encoding* again with character replacement, so some result is
        always returned rather than an exception raised.
        """
        if not encoding:
            encoding = self.encoding or 'utf-8'

        if isinstance(s, unicode):
            return s.encode("utf-8")
        try:
            return s.decode(encoding).encode("utf-8")
        except:
            try:
                # common fallback: latin-1 can decode any byte sequence
                return s.decode("latin-1").encode("utf-8")
            except:
                # last resort: replace undecodable bytes
                return s.decode(encoding, "replace").encode("utf-8")

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev. The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError()

    def converted(self, rev, sinkrev):
        '''Notify the source that a revision has been converted.'''
        pass
125 125
126 126
class converter_sink(object):
    """Conversion sink (target) interface"""

    def __init__(self, ui, path):
        """Initialize conversion sink (or raise NoRepo("message")
        exception if path is not a valid repository)

        created is a list of paths to remove if a fatal error occurs
        later"""
        self.ui = ui
        self.path = path
        self.created = []

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError()

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError()

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent authors identifiers for each system."""
        return None

    def putcommit(self, files, copies, parents, commit, source):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object containing
        at a minimum the author, date, and message for this changeset.
        'files' is a list of (path, version) tuples, 'copies' is a dictionary
        mapping destinations to sources, and 'source' is the source repository.
        Only getfile() and getmode() should be called on 'source'.

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError()

    def puttags(self, tags):
        """Put tags into sink.
        tags: {tagname: sink_rev_id, ...}"""
        raise NotImplementedError()

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.
        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""
        pass

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change. This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """
        pass

    def before(self):
        # hook called before the conversion starts; optional for subclasses
        pass

    def after(self):
        # hook called when the conversion is finished, even on error
        pass
197 197
198 198
class commandline(object):
    """Helper base class for sources/sinks that drive an external
    command-line tool (cvs, darcs, ...)."""

    def __init__(self, ui, command):
        self.ui = ui
        self.command = command  # executable name, e.g. 'cvs'

    def prerun(self):
        # hook invoked just before each command run
        pass

    def postrun(self):
        # hook invoked after each command run, even on error
        pass

    def _cmdline(self, cmd, *args, **kwargs):
        """Build the quoted shell command line for subcommand *cmd*.

        Keyword arguments become options: a single-letter key turns into
        '-k value', a longer key into '--long-key=value' (underscores
        mapped to dashes).  A value that cannot be concatenated to a
        string (e.g. True) makes the option a bare flag.
        """
        cmdline = [self.command, cmd] + list(args)
        for k, v in kwargs.iteritems():
            if len(k) == 1:
                cmdline.append('-' + k)
            else:
                cmdline.append('--' + k.replace('_', '-'))
            try:
                if len(k) == 1:
                    cmdline.append('' + v)
                else:
                    cmdline[-1] += '=' + v
            except TypeError:
                # non-string value: leave the option as a bare flag
                pass
        cmdline = [util.shellquote(arg) for arg in cmdline]
        # silence stderr and detach the child from our stdin
        cmdline += ['2>', util.nulldev, '<', util.nulldev]
        cmdline = ' '.join(cmdline)
        return cmdline

    def _run(self, cmd, *args, **kwargs):
        """Start the command and return its output pipe."""
        cmdline = self._cmdline(cmd, *args, **kwargs)
        self.ui.debug(_('running: %s\n') % (cmdline,))
        self.prerun()
        try:
            return util.popen(cmdline)
        finally:
            self.postrun()

    def run(self, cmd, *args, **kwargs):
        """Run the command; return (output string, exit status or None)."""
        fp = self._run(cmd, *args, **kwargs)
        output = fp.read()
        self.ui.debug(output)
        return output, fp.close()

    def runlines(self, cmd, *args, **kwargs):
        """Like run(), but return the output as a list of lines."""
        fp = self._run(cmd, *args, **kwargs)
        output = fp.readlines()
        self.ui.debug(''.join(output))
        return output, fp.close()

    def checkexit(self, status, output=''):
        """Abort (after showing *output*) if the command exited non-zero."""
        if status:
            if output:
                self.ui.warn(_('%s error:\n') % self.command)
                self.ui.warn(output)
            msg = util.explain_exit(status)[0]
            raise util.Abort(_('%s %s') % (self.command, msg))

    def run0(self, cmd, *args, **kwargs):
        """Run the command, aborting on failure; return its output."""
        output, status = self.run(cmd, *args, **kwargs)
        self.checkexit(status, output)
        return output

    def runlines0(self, cmd, *args, **kwargs):
        """Like run0(), but return the output as a list of lines."""
        output, status = self.runlines(cmd, *args, **kwargs)
        self.checkexit(status, ''.join(output))
        return output

    def getargmax(self):
        """Return (and cache) the maximum command line length to use."""
        if '_argmax' in self.__dict__:
            return self._argmax

        # POSIX requires at least 4096 bytes for ARG_MAX
        self._argmax = 4096
        try:
            self._argmax = os.sysconf("SC_ARG_MAX")
        except:
            # os.sysconf may be missing (Windows) or the key unsupported
            pass

        # Windows shells impose their own limits on command line length,
        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
        # details about cmd.exe limitations.

        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).

        self._argmax = self._argmax/2 - 1
        return self._argmax

    def limit_arglist(self, arglist, cmd, *args, **kwargs):
        """Yield successive chunks of *arglist* small enough that the
        resulting command line stays under the system limit."""
        limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
        bytes = 0
        fl = []
        for fn in arglist:
            # + 3 accounts for quoting and a separating space
            b = len(fn) + 3
            if bytes + b < limit or len(fl) == 0:
                fl.append(fn)
                bytes += b
            else:
                yield fl
                fl = [fn]
                bytes = b
        if fl:
            yield fl

    def xargs(self, arglist, cmd, *args, **kwargs):
        """Run *cmd* repeatedly, xargs-style, over a long argument list."""
        for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
            self.run0(cmd, *(list(args) + l), **kwargs)
309 309
class mapfile(dict):
    """A persistent dict of 'key value' lines backed by a text file.

    Every assignment is appended to the file and flushed immediately so
    that an interrupted conversion can be resumed from the map."""
    def __init__(self, ui, path):
        super(mapfile, self).__init__()
        self.ui = ui
        self.path = path    # may be None for a purely in-memory map
        self.fp = None      # append handle, opened lazily on first write
        self.order = []     # keys in first-seen order
        self._read()

    def _read(self):
        """Load existing entries; a missing file is not an error."""
        if self.path is None:
            return
        try:
            fp = open(self.path, 'r')
        except IOError, err:
            if err.errno != errno.ENOENT:
                raise
            return
        for line in fp:
            key, value = line[:-1].split(' ', 1)
            if key not in self:
                self.order.append(key)
            # later lines override earlier ones for the same key
            super(mapfile, self).__setitem__(key, value)
        fp.close()

    def __setitem__(self, key, value):
        # persist the entry before updating the in-memory dict
        if self.fp is None:
            try:
                self.fp = open(self.path, 'a')
            except IOError, err:
                raise util.Abort(_('could not open map file %r: %s') %
                                 (self.path, err.strerror))
        self.fp.write('%s %s\n' % (key, value))
        self.fp.flush()
        super(mapfile, self).__setitem__(key, value)

    def close(self):
        if self.fp:
            self.fp.close()
            self.fp = None
@@ -1,337 +1,337 b''
1 1 # convcmd - convert extension commands definition
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms
6 6 # of the GNU General Public License, incorporated herein by reference.
7 7
8 8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 9 from cvs import convert_cvs
10 10 from darcs import darcs_source
11 11 from git import convert_git
12 12 from hg import mercurial_source, mercurial_sink
13 13 from subversion import debugsvnlog, svn_source, svn_sink
14 14 from monotone import monotone_source
15 15 from gnuarch import gnuarch_source
16 16 import filemap
17 17
18 18 import os, shutil
19 19 from mercurial import hg, util
20 20 from mercurial.i18n import _
21 21
22 22 orig_encoding = 'ascii'
23 23
def recode(s):
    """Re-encode *s* (UTF-8 bytes or unicode) into the original local
    encoding, replacing characters it cannot represent."""
    if not isinstance(s, unicode):
        s = s.decode('utf-8')
    return s.encode(orig_encoding, 'replace')
29 29
# (name, converter class) pairs of known source repository types;
# convertsource() tries them in this order unless --source-type picks one
source_converters = [
    ('cvs', convert_cvs),
    ('git', convert_git),
    ('svn', svn_source),
    ('hg', mercurial_source),
    ('darcs', darcs_source),
    ('mtn', monotone_source),
    ('gnuarch', gnuarch_source),
    ]

# (name, converter class) pairs of known sink repository types,
# tried in order by convertsink()
sink_converters = [
    ('hg', mercurial_sink),
    ('svn', svn_sink),
    ]
44 44
45 45 def convertsource(ui, path, type, rev):
46 46 exceptions = []
47 47 for name, source in source_converters:
48 48 try:
49 49 if not type or name == type:
50 50 return source(ui, path, rev)
51 51 except (NoRepo, MissingTool), inst:
52 52 exceptions.append(inst)
53 53 if not ui.quiet:
54 54 for inst in exceptions:
55 55 ui.write("%s\n" % inst)
56 56 raise util.Abort(_('%s: unknown repository type') % path)
57 57
58 58 def convertsink(ui, path, type):
59 59 for name, sink in sink_converters:
60 60 try:
61 61 if not type or name == type:
62 62 return sink(ui, path)
63 63 except NoRepo, inst:
64 64 ui.note(_("convert: %s\n") % inst)
65 65 raise util.Abort(_('%s: unknown repository type') % path)
66 66
class converter(object):
    """Drives a conversion from a converter_source to a converter_sink."""

    def __init__(self, ui, source, dest, revmapfile, opts):

        self.source = source
        self.dest = dest
        self.ui = ui
        self.opts = opts
        self.commitcache = {}   # source rev -> commit object
        self.authors = {}       # source author -> destination author
        self.authorfile = None

        # map of already-converted revisions (source rev -> sink rev)
        self.map = mapfile(ui, revmapfile)

        # Read first the dst author map if any
        authorfile = self.dest.authorfile()
        if authorfile and os.path.exists(authorfile):
            self.readauthormap(authorfile)
        # Extend/Override with new author map if necessary
        if opts.get('authors'):
            self.readauthormap(opts.get('authors'))
            self.authorfile = self.dest.authorfile()

        self.splicemap = mapfile(ui, opts.get('splicemap'))

    def walktree(self, heads):
        '''Return a mapping that identifies the uncommitted parents of every
        uncommitted changeset.'''
        visit = heads
        known = {}
        parents = {}
        while visit:
            n = visit.pop(0)
            if n in known or n in self.map: continue
            known[n] = 1
            commit = self.cachecommit(n)
            parents[n] = []
            for p in commit.parents:
                parents[n].append(p)
                visit.append(p)

        return parents

    def toposort(self, parents):
        '''Return an ordering such that every uncommitted changeset is
        preceeded by all its uncommitted ancestors.'''
        visit = parents.keys()
        seen = {}
        children = {}
        actives = []

        while visit:
            n = visit.pop(0)
            if n in seen: continue
            seen[n] = 1
            # Ensure that nodes without parents are present in the 'children'
            # mapping.
            children.setdefault(n, [])
            hasparent = False
            for p in parents[n]:
                if not p in self.map:
                    visit.append(p)
                    hasparent = True
                children.setdefault(p, []).append(n)
            if not hasparent:
                actives.append(n)

        del seen
        del visit

        if self.opts.get('datesort'):
            dates = {}
            def getdate(n):
                # memoize parsed dates; parsing is not free
                if n not in dates:
                    dates[n] = util.parsedate(self.commitcache[n].date)
                return dates[n]

            def picknext(nodes):
                return min([(getdate(n), n) for n in nodes])[1]
        else:
            prev = [None]
            def picknext(nodes):
                # Return the first eligible child of the previously converted
                # revision, or any of them.
                next = nodes[0]
                for n in nodes:
                    if prev[0] in parents[n]:
                        next = n
                        break
                prev[0] = next
                return next

        s = []
        pendings = {}
        while actives:
            n = picknext(actives)
            actives.remove(n)
            s.append(n)

            # Update dependents list
            for c in children.get(n, []):
                if c not in pendings:
                    pendings[c] = [p for p in parents[c] if p not in self.map]
                try:
                    pendings[c].remove(n)
                except ValueError:
                    raise util.Abort(_('cycle detected between %s and %s')
                                       % (recode(c), recode(n)))
                if not pendings[c]:
                    # Parents are converted, node is eligible
                    actives.insert(0, c)
                    pendings[c] = None

        if len(s) != len(parents):
            raise util.Abort(_("not all revisions were sorted"))

        return s

    def writeauthormap(self):
        """Save the accumulated author map to self.authorfile, if any."""
        authorfile = self.authorfile
        if authorfile:
            self.ui.status(_('Writing author map file %s\n') % authorfile)
            ofile = open(authorfile, 'w+')
            for author in self.authors:
                ofile.write("%s=%s\n" % (author, self.authors[author]))
            ofile.close()

    def readauthormap(self, authorfile):
        """Merge 'srcauthor=dstauthor' lines from *authorfile* into
        self.authors, warning about malformed lines and overrides."""
        afile = open(authorfile, 'r')
        for line in afile:
            if line.strip() == '':
                continue
            try:
                srcauthor, dstauthor = line.split('=', 1)
                srcauthor = srcauthor.strip()
                dstauthor = dstauthor.strip()
                if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
                    self.ui.status(
                        _('Overriding mapping for author %s, was %s, will be %s\n')
                        % (srcauthor, self.authors[srcauthor], dstauthor))
                else:
                    self.ui.debug(_('Mapping author %s to %s\n')
                                  % (srcauthor, dstauthor))
                self.authors[srcauthor] = dstauthor
            except ValueError:
                # a line without '=' makes the unpacking above raise
                # ValueError (not IndexError, which the original code
                # caught); handle the right exception so bad lines are
                # skipped with a warning instead of aborting
                self.ui.warn(
                    _('Ignoring bad line in author map file %s: %s\n')
                    % (authorfile, line.rstrip()))
        afile.close()

    def cachecommit(self, rev):
        """Fetch the commit object for *rev*, apply the author map, and
        memoize it in self.commitcache."""
        commit = self.source.getcommit(rev)
        commit.author = self.authors.get(commit.author, commit.author)
        self.commitcache[rev] = commit
        return commit

    def copy(self, rev):
        """Convert the single revision *rev* and record the mapping."""
        commit = self.commitcache[rev]

        changes = self.source.getchanges(rev)
        if isinstance(changes, basestring):
            # the source handed back a revision id instead of a change
            # list: rev is skipped or folded into another revision
            if changes == SKIPREV:
                dest = SKIPREV
            else:
                dest = self.map[changes]
            self.map[rev] = dest
            return
        files, copies = changes
        pbranches = []
        if commit.parents:
            for prev in commit.parents:
                if prev not in self.commitcache:
                    self.cachecommit(prev)
                pbranches.append((self.map[prev],
                                  self.commitcache[prev].branch))
        self.dest.setbranch(commit.branch, pbranches)
        try:
            parents = self.splicemap[rev].replace(',', ' ').split()
            self.ui.status(_('spliced in %s as parents of %s\n') %
                           (parents, rev))
            parents = [self.map.get(p, p) for p in parents]
        except KeyError:
            # no splicemap entry for rev: use the real parents
            parents = [b[0] for b in pbranches]
        newnode = self.dest.putcommit(files, copies, parents, commit, self.source)
        self.source.converted(rev, newnode)
        self.map[rev] = newnode

    def convert(self):
        """Run the whole conversion: scan, sort, copy revisions, convert
        tags and write the author map."""
        try:
            self.source.before()
            self.dest.before()
            self.source.setrevmap(self.map)
            self.ui.status(_("scanning source...\n"))
            heads = self.source.getheads()
            parents = self.walktree(heads)
            self.ui.status(_("sorting...\n"))
            t = self.toposort(parents)
            num = len(t)
            c = None

            self.ui.status(_("converting...\n"))
            for c in t:
                num -= 1
                desc = self.commitcache[c].desc
                if "\n" in desc:
                    desc = desc.splitlines()[0]
                # convert log message to local encoding without using
                # tolocal() because util._encoding was forced to 'utf-8'
                # by convert() below
                self.ui.status("%d %s\n" % (num, recode(desc)))
                self.ui.note(_("source: %s\n") % recode(c))
                self.copy(c)

            tags = self.source.gettags()
            ctags = {}
            for k in tags:
                v = tags[k]
                if self.map.get(v, SKIPREV) != SKIPREV:
                    ctags[k] = self.map[v]

            if c and ctags:
                nrev = self.dest.puttags(ctags)
                # write another hash correspondence to override the previous
                # one so we don't end up with extra tag heads
                if nrev:
                    self.map[c] = nrev

            self.writeauthormap()
        finally:
            self.cleanup()

    def cleanup(self):
        """Run the source/sink after() hooks and close the revision map."""
        try:
            self.dest.after()
        finally:
            self.source.after()
        self.map.close()
304 304
def convert(ui, src, dest=None, revmapfile=None, **opts):
    """Command entry point: convert repository *src* into *dest*
    (defaulting to '<src>-hg'), recording the revision map in
    *revmapfile* (defaulting to the sink's own map file)."""
    global orig_encoding
    # remember the local encoding for recode() and force conversion
    # internals to work in UTF-8
    orig_encoding = util._encoding
    util._encoding = 'UTF-8'

    if not dest:
        dest = hg.defaultdest(src) + "-hg"
        ui.status(_("assuming destination %s\n") % dest)

    destc = convertsink(ui, dest, opts.get('dest_type'))

    try:
        srcc = convertsource(ui, src, opts.get('source_type'),
                             opts.get('rev'))
    except Exception:
        # creating the sink may have created directories; remove them
        # before propagating the failure
        for path in destc.created:
            shutil.rmtree(path, True)
        raise

    fmap = opts.get('filemap')
    if fmap:
        srcc = filemap.filemap_source(ui, srcc, fmap)
        destc.setfilemapmode(True)

    if not revmapfile:
        try:
            revmapfile = destc.revmapfile()
        except:
            # NOTE(review): this fallback joins the sink *object* itself
            # with "map" — presumably unreachable in practice; verify
            revmapfile = os.path.join(destc, "map")

    c = converter(ui, srcc, destc, revmapfile, opts)
    c.convert()
@@ -1,349 +1,349 b''
1 1 # CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 2
3 3 import os, locale, re, socket
4 4 from cStringIO import StringIO
5 5 from mercurial import util
6 6 from mercurial.i18n import _
7 7
8 8 from common import NoRepo, commit, converter_source, checktool
9 9 import cvsps
10 10
class convert_cvs(converter_source):
    """Conversion source for CVS checkouts.

    Changesets are gathered with cvsps (the builtin implementation or an
    external cvsps command); file contents are fetched over a direct
    connection to the CVS server using the client/server protocol."""

    def __init__(self, ui, path, rev=None):
        super(convert_cvs, self).__init__(ui, path, rev=rev)

        cvs = os.path.join(path, "CVS")
        if not os.path.exists(cvs):
            raise NoRepo("%s does not look like a CVS checkout" % path)

        checktool('cvs')
        self.cmd = ui.config('convert', 'cvsps', 'cvsps -A -u --cvs-direct -q')
        cvspsexe = self.cmd.split(None, 1)[0]
        self.builtin = cvspsexe == 'builtin'

        if not self.builtin:
            checktool(cvspsexe)

        self.changeset = {}     # patchset id -> commit object
        self.files = {}         # patchset id -> {filename: revision string}
        self.tags = {}          # tag name -> patchset id
        self.lastbranch = {}    # branch name -> last patchset id on it
        self.parent = {}        # patchset id -> parent patchset id
        self.socket = None
        self.cvsroot = file(os.path.join(cvs, "Root")).read()[:-1]
        self.cvsrepo = file(os.path.join(cvs, "Repository")).read()[:-1]
        self.encoding = locale.getpreferredencoding()

        self._parse(ui)
        self._connect()

    def _parse(self, ui):
        """Populate changeset/files/tags/parent from cvsps output."""
        if self.changeset:
            return

        maxrev = 0
        cmd = self.cmd
        if self.rev:
            # TODO: handle tags
            try:
                # patchset number?
                maxrev = int(self.rev)
            except ValueError:
                try:
                    # date
                    util.parsedate(self.rev, ['%Y/%m/%d %H:%M:%S'])
                    cmd = '%s -d "1970/01/01 00:00:01" -d "%s"' % (cmd, self.rev)
                except util.Abort:
                    raise util.Abort(_('revision %s is not a patchset number or date') % self.rev)

        d = os.getcwd()
        try:
            os.chdir(self.path)
            id = None
            # parser state for the external cvsps output:
            # 0 = header, 1 = log message, 2 = members, 3 = skipping
            state = 0
            filerevids = {}

            if self.builtin:
                # builtin cvsps code
                ui.status(_('using builtin cvsps\n'))

                db = cvsps.createlog(ui, cache='update')
                db = cvsps.createchangeset(ui, db,
                    fuzz=int(ui.config('convert', 'cvsps.fuzz', 60)),
                    mergeto=ui.config('convert', 'cvsps.mergeto', None),
                    mergefrom=ui.config('convert', 'cvsps.mergefrom', None))

                for cs in db:
                    if maxrev and cs.id>maxrev:
                        break
                    id = str(cs.id)
                    cs.author = self.recode(cs.author)
                    self.lastbranch[cs.branch] = id
                    cs.comment = self.recode(cs.comment)
                    date = util.datestr(cs.date)
                    self.tags.update(dict.fromkeys(cs.tags, id))

                    files = {}
                    for f in cs.entries:
                        files[f.file] = "%s%s" % ('.'.join([str(x) for x in f.revision]),
                                                  ['', '(DEAD)'][f.dead])

                    # add current commit to set
                    c = commit(author=cs.author, date=date,
                               parents=[str(p.id) for p in cs.parents],
                               desc=cs.comment, branch=cs.branch or '')
                    self.changeset[id] = c
                    self.files[id] = files
            else:
                # external cvsps
                for l in util.popen(cmd):
                    if state == 0: # header
                        if l.startswith("PatchSet"):
                            id = l[9:-2]
                            if maxrev and int(id) > maxrev:
                                # ignore everything
                                state = 3
                        elif l.startswith("Date"):
                            date = util.parsedate(l[6:-1], ["%Y/%m/%d %H:%M:%S"])
                            date = util.datestr(date)
                        elif l.startswith("Branch"):
                            branch = l[8:-1]
                            self.parent[id] = self.lastbranch.get(branch, 'bad')
                            self.lastbranch[branch] = id
                        elif l.startswith("Ancestor branch"):
                            ancestor = l[17:-1]
                            # figure out the parent later
                            self.parent[id] = self.lastbranch[ancestor]
                        elif l.startswith("Author"):
                            author = self.recode(l[8:-1])
                        elif l.startswith("Tag:") or l.startswith("Tags:"):
                            t = l[l.index(':')+1:]
                            t = [ut.strip() for ut in t.split(',')]
                            if (len(t) > 1) or (t[0] and (t[0] != "(none)")):
                                self.tags.update(dict.fromkeys(t, id))
                        elif l.startswith("Log:"):
                            # switch to gathering log
                            state = 1
                            log = ""
                    elif state == 1: # log
                        if l == "Members: \n":
                            # switch to gathering members
                            files = {}
                            oldrevs = []
                            log = self.recode(log[:-1])
                            state = 2
                        else:
                            # gather log
                            log += l
                    elif state == 2: # members
                        if l == "\n": # start of next entry
                            state = 0
                            p = [self.parent[id]]
                            if id == "1":
                                p = []
                            if branch == "HEAD":
                                branch = ""
                            if branch:
                                latest = None
                                # the last changeset that contains a base
                                # file is our parent
                                for r in oldrevs:
                                    latest = max(filerevids.get(r, None), latest)
                                if latest:
                                    p = [latest]

                            # add current commit to set
                            c = commit(author=author, date=date, parents=p,
                                       desc=log, branch=branch)
                            self.changeset[id] = c
                            self.files[id] = files
                        else:
                            colon = l.rfind(':')
                            file = l[1:colon]
                            rev = l[colon+1:-2]
                            oldrev, rev = rev.split("->")
                            files[file] = rev

                            # save some information for identifying branch points
                            oldrevs.append("%s:%s" % (oldrev, file))
                            filerevids["%s:%s" % (rev, file)] = id
                    elif state == 3:
                        # swallow all input
                        continue

            self.heads = self.lastbranch.values()
        finally:
            os.chdir(d)

    def _connect(self):
        """Connect to the CVS server (pserver, rsh/ext or local) and
        perform the initial protocol handshake."""
        root = self.cvsroot
        conntype = None
        user, host = None, None
        cmd = ['cvs', 'server']

        self.ui.status(_("connecting to %s\n") % root)

        if root.startswith(":pserver:"):
            root = root[9:]
            m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
                         root)
            if m:
                conntype = "pserver"
                user, passw, serv, port, root = m.groups()
                if not user:
                    user = "anonymous"
                if not port:
                    port = 2401
                else:
                    port = int(port)
                format0 = ":pserver:%s@%s:%s" % (user, serv, root)
                format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)

                if not passw:
                    # look the scrambled password up in ~/.cvspass
                    passw = "A"
                    pf = open(os.path.expanduser("~/.cvspass"))
                    for line in pf.read().splitlines():
                        part1, part2 = line.split(' ', 1)
                        if part1 == '/1':
                            # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
                            part1, part2 = part2.split(' ', 1)
                            format = format1
                        else:
                            # :pserver:user@example.com:/cvsroot/foo Ah<Z
                            format = format0
                        if part1 == format:
                            passw = part2
                            break
                    pf.close()

                sck = socket.socket()
                sck.connect((serv, port))
                sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
                                    "END AUTH REQUEST", ""]))
                if sck.recv(128) != "I LOVE YOU\n":
                    raise util.Abort(_("CVS pserver authentication failed"))

                self.writep = self.readp = sck.makefile('r+')

        if not conntype and root.startswith(":local:"):
            conntype = "local"
            root = root[7:]

        if not conntype:
            # :ext:user@host/home/user/path/to/cvsroot
            if root.startswith(":ext:"):
                root = root[5:]
            m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
            # Do not take Windows path "c:\foo\bar" for a connection strings
            if os.path.isdir(root) or not m:
                conntype = "local"
            else:
                conntype = "rsh"
                user, host, root = m.group(1), m.group(2), m.group(3)

        if conntype != "pserver":
            if conntype == "rsh":
                rsh = os.environ.get("CVS_RSH") or "ssh"
                if user:
                    cmd = [rsh, '-l', user, host] + cmd
                else:
                    cmd = [rsh, host] + cmd

            # popen2 does not support argument lists under Windows
            cmd = [util.shellquote(arg) for arg in cmd]
            cmd = util.quotecommand(' '.join(cmd))
            self.writep, self.readp = os.popen2(cmd, 'b')

        self.realroot = root

        self.writep.write("Root %s\n" % root)
        self.writep.write("Valid-responses ok error Valid-requests Mode"
                          " M Mbinary E Checked-in Created Updated"
                          " Merged Removed\n")
        self.writep.write("valid-requests\n")
        self.writep.flush()
        r = self.readp.readline()
        if not r.startswith("Valid-requests"):
            raise util.Abort(_("server sucks"))
        if "UseUnchanged" in r:
            self.writep.write("UseUnchanged\n")
            self.writep.flush()
            r = self.readp.readline()

    def getheads(self):
        return self.heads

    def _getfile(self, name, rev):
        """Check revision *rev* of *name* out of the server; return
        (data, mode) where mode is 'x' for executables, '' otherwise."""

        def chunkedread(fp, count):
            # file-objects returned by socked.makefile() do not handle
            # large read() requests very well.
            chunksize = 65536
            output = StringIO()
            while count > 0:
                data = fp.read(min(count, chunksize))
                if not data:
                    raise util.Abort(_("%d bytes missing from remote file") % count)
                count -= len(data)
                output.write(data)
            return output.getvalue()

        if rev.endswith("(DEAD)"):
            # file was removed in this revision
            raise IOError

        args = ("-N -P -kk -r %s --" % rev).split()
        args.append(self.cvsrepo + '/' + name)
        for x in args:
            self.writep.write("Argument %s\n" % x)
        self.writep.write("Directory .\n%s\nco\n" % self.realroot)
        self.writep.flush()

        data = ""
        while 1:
            line = self.readp.readline()
            if line.startswith("Created ") or line.startswith("Updated "):
                self.readp.readline() # path
                self.readp.readline() # entries
                mode = self.readp.readline()[:-1]
                count = int(self.readp.readline()[:-1])
                data = chunkedread(self.readp, count)
            elif line.startswith(" "):
                data += line[1:]
            elif line.startswith("M "):
                pass
            elif line.startswith("Mbinary "):
                count = int(self.readp.readline()[:-1])
                data = chunkedread(self.readp, count)
            else:
                if line == "ok\n":
                    return (data, "x" in mode and "x" or "")
                elif line.startswith("E "):
                    self.ui.warn(_("cvs server: %s\n") % line[2:])
                elif line.startswith("Remove"):
                    l = self.readp.readline()
                    l = self.readp.readline()
                    if l != "ok\n":
                        raise util.Abort(_("unknown CVS response: %s") % l)
                else:
                    raise util.Abort(_("unknown CVS response: %s") % line)

    def getfile(self, file, rev):
        # remember the mode for the getmode() call that follows
        data, mode = self._getfile(file, rev)
        self.modecache[(file, rev)] = mode
        return data

    def getmode(self, file, rev):
        return self.modecache[(file, rev)]

    def getchanges(self, rev):
        self.modecache = {}
        return util.sort(self.files[rev].items()), {}

    def getcommit(self, rev):
        return self.changeset[rev]

    def gettags(self):
        return self.tags

    def getchangedfiles(self, rev, i):
        return util.sort(self.files[rev].keys())
@@ -1,548 +1,548 b''
1 1 #
2 2 # Mercurial built-in replacement for cvsps.
3 3 #
4 4 # Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
5 5 #
6 6 # This software may be used and distributed according to the terms
7 7 # of the GNU General Public License, incorporated herein by reference.
8 8
9 9 import os
10 10 import re
11 11 import sys
12 12 import cPickle as pickle
13 13 from mercurial import util
14 14 from mercurial.i18n import _
15 15
def listsort(list, key):
    """Sort *list* in place by *key*; fall back for Python 2.3's sort()."""
    try:
        list.sort(key=key)
    except TypeError:
        # Python 2.3's list.sort() lacks the key= argument; emulate it
        # with an equivalent comparison function.
        list.sort(lambda l, r: cmp(key(l), key(r)))
22 22
class logentry(object):
    '''One revision of one file, as reported by CVS rlog.

    Attributes:
    .author    - author name as CVS knows it
    .branch    - name of branch this revision is on
    .branches  - revision tuple of branches starting at this revision
    .comment   - commit message
    .date      - the commit date as a (time, tz) tuple
    .dead      - true if file revision is dead
    .file      - name of file
    .lines     - a tuple (+lines, -lines) or None
    .parent    - previous revision of this entry
    .rcs       - name of file as returned from CVS
    .revision  - revision number as tuple
    .tags      - list of tags on the file
    '''
    def __init__(self, **entries):
        # Attributes are supplied as keyword arguments.
        for name, value in entries.items():
            setattr(self, name, value)
40 40
class logerror(Exception):
    """Raised when the CVS (r)log cannot be read or is inconsistent."""
    pass
43 43
44 44 def createlog(ui, directory=None, root="", rlog=True, cache=None):
45 45 '''Collect the CVS rlog'''
46 46
47 47 # Because we store many duplicate commit log messages, reusing strings
48 48 # saves a lot of memory and pickle storage space.
49 49 _scache = {}
50 50 def scache(s):
51 51 "return a shared version of a string"
52 52 return _scache.setdefault(s, s)
53 53
54 54 ui.status(_('collecting CVS rlog\n'))
55 55
56 56 log = [] # list of logentry objects containing the CVS state
57 57
58 58 # patterns to match in CVS (r)log output, by state of use
59 59 re_00 = re.compile('RCS file: (.+)$')
60 60 re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
61 61 re_02 = re.compile('cvs (r?log|server): (.+)\n$')
62 62 re_03 = re.compile("(Cannot access.+CVSROOT)|(can't create temporary directory.+)$")
63 63 re_10 = re.compile('Working file: (.+)$')
64 64 re_20 = re.compile('symbolic names:')
65 65 re_30 = re.compile('\t(.+): ([\\d.]+)$')
66 66 re_31 = re.compile('----------------------------$')
67 67 re_32 = re.compile('=============================================================================$')
68 68 re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
69 69 re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?')
70 70 re_70 = re.compile('branches: (.+);$')
71 71
72 72 prefix = '' # leading path to strip of what we get from CVS
73 73
74 74 if directory is None:
75 75 # Current working directory
76 76
77 77 # Get the real directory in the repository
78 78 try:
79 79 prefix = file(os.path.join('CVS','Repository')).read().strip()
80 80 if prefix == ".":
81 81 prefix = ""
82 82 directory = prefix
83 83 except IOError:
84 84 raise logerror('Not a CVS sandbox')
85 85
86 86 if prefix and not prefix.endswith('/'):
87 87 prefix += '/'
88 88
89 89 # Use the Root file in the sandbox, if it exists
90 90 try:
91 91 root = file(os.path.join('CVS','Root')).read().strip()
92 92 except IOError:
93 93 pass
94 94
95 95 if not root:
96 96 root = os.environ.get('CVSROOT', '')
97 97
98 98 # read log cache if one exists
99 99 oldlog = []
100 100 date = None
101 101
102 102 if cache:
103 103 cachedir = os.path.expanduser('~/.hg.cvsps')
104 104 if not os.path.exists(cachedir):
105 105 os.mkdir(cachedir)
106 106
107 107 # The cvsps cache pickle needs a uniquified name, based on the
108 108 # repository location. The address may have all sort of nasties
109 109 # in it, slashes, colons and such. So here we take just the
110 110 # alphanumerics, concatenated in a way that does not mix up the
111 111 # various components, so that
112 112 # :pserver:user@server:/path
113 113 # and
114 114 # /pserver/user/server/path
115 115 # are mapped to different cache file names.
116 116 cachefile = root.split(":") + [directory, "cache"]
117 117 cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
118 118 cachefile = os.path.join(cachedir,
119 119 '.'.join([s for s in cachefile if s]))
120 120
121 121 if cache == 'update':
122 122 try:
123 123 ui.note(_('reading cvs log cache %s\n') % cachefile)
124 124 oldlog = pickle.load(file(cachefile))
125 125 ui.note(_('cache has %d log entries\n') % len(oldlog))
126 126 except Exception, e:
127 127 ui.note(_('error reading cache: %r\n') % e)
128 128
129 129 if oldlog:
130 130 date = oldlog[-1].date # last commit date as a (time,tz) tuple
131 131 date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')
132 132
133 133 # build the CVS commandline
134 134 cmd = ['cvs', '-q']
135 135 if root:
136 136 cmd.append('-d%s' % root)
137 137 p = root.split(':')[-1]
138 138 if not p.endswith('/'):
139 139 p += '/'
140 140 prefix = p + prefix
141 141 cmd.append(['log', 'rlog'][rlog])
142 142 if date:
143 143 # no space between option and date string
144 144 cmd.append('-d>%s' % date)
145 145 cmd.append(directory)
146 146
147 147 # state machine begins here
148 148 tags = {} # dictionary of revisions on current file with their tags
149 149 state = 0
150 150 store = False # set when a new record can be appended
151 151
152 152 cmd = [util.shellquote(arg) for arg in cmd]
153 ui.note("running %s\n" % (' '.join(cmd)))
154 ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))
153 ui.note(_("running %s\n") % (' '.join(cmd)))
154 ui.debug(_("prefix=%r directory=%r root=%r\n") % (prefix, directory, root))
155 155
156 156 for line in util.popen(' '.join(cmd)):
157 157 if line.endswith('\n'):
158 158 line = line[:-1]
159 159 #ui.debug('state=%d line=%r\n' % (state, line))
160 160
161 161 if state == 0:
162 162 # initial state, consume input until we see 'RCS file'
163 163 match = re_00.match(line)
164 164 if match:
165 165 rcs = match.group(1)
166 166 tags = {}
167 167 if rlog:
168 168 filename = rcs[:-2]
169 169 if filename.startswith(prefix):
170 170 filename = filename[len(prefix):]
171 171 if filename.startswith('/'):
172 172 filename = filename[1:]
173 173 if filename.startswith('Attic/'):
174 174 filename = filename[6:]
175 175 else:
176 176 filename = filename.replace('/Attic/', '/')
177 177 state = 2
178 178 continue
179 179 state = 1
180 180 continue
181 181 match = re_01.match(line)
182 182 if match:
183 183 raise Exception(match.group(1))
184 184 match = re_02.match(line)
185 185 if match:
186 186 raise Exception(match.group(2))
187 187 if re_03.match(line):
188 188 raise Exception(line)
189 189
190 190 elif state == 1:
191 191 # expect 'Working file' (only when using log instead of rlog)
192 192 match = re_10.match(line)
193 193 assert match, _('RCS file must be followed by working file')
194 194 filename = match.group(1)
195 195 state = 2
196 196
197 197 elif state == 2:
198 198 # expect 'symbolic names'
199 199 if re_20.match(line):
200 200 state = 3
201 201
202 202 elif state == 3:
203 203 # read the symbolic names and store as tags
204 204 match = re_30.match(line)
205 205 if match:
206 206 rev = [int(x) for x in match.group(2).split('.')]
207 207
208 208 # Convert magic branch number to an odd-numbered one
209 209 revn = len(rev)
210 210 if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
211 211 rev = rev[:-2] + rev[-1:]
212 212 rev = tuple(rev)
213 213
214 214 if rev not in tags:
215 215 tags[rev] = []
216 216 tags[rev].append(match.group(1))
217 217
218 218 elif re_31.match(line):
219 219 state = 5
220 220 elif re_32.match(line):
221 221 state = 0
222 222
223 223 elif state == 4:
224 224 # expecting '------' separator before first revision
225 225 if re_31.match(line):
226 226 state = 5
227 227 else:
228 228 assert not re_32.match(line), _('Must have at least some revisions')
229 229
230 230 elif state == 5:
231 231 # expecting revision number and possibly (ignored) lock indication
232 232 # we create the logentry here from values stored in states 0 to 4,
233 233 # as this state is re-entered for subsequent revisions of a file.
234 234 match = re_50.match(line)
235 235 assert match, _('expected revision number')
236 236 e = logentry(rcs=scache(rcs), file=scache(filename),
237 237 revision=tuple([int(x) for x in match.group(1).split('.')]),
238 238 branches=[], parent=None)
239 239 state = 6
240 240
241 241 elif state == 6:
242 242 # expecting date, author, state, lines changed
243 243 match = re_60.match(line)
244 244 assert match, _('revision must be followed by date line')
245 245 d = match.group(1)
246 246 if d[2] == '/':
247 247 # Y2K
248 248 d = '19' + d
249 249
250 250 if len(d.split()) != 3:
251 251 # cvs log dates always in GMT
252 252 d = d + ' UTC'
253 253 e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', '%Y/%m/%d %H:%M:%S', '%Y-%m-%d %H:%M:%S'])
254 254 e.author = scache(match.group(2))
255 255 e.dead = match.group(3).lower() == 'dead'
256 256
257 257 if match.group(5):
258 258 if match.group(6):
259 259 e.lines = (int(match.group(5)), int(match.group(6)))
260 260 else:
261 261 e.lines = (int(match.group(5)), 0)
262 262 elif match.group(6):
263 263 e.lines = (0, int(match.group(6)))
264 264 else:
265 265 e.lines = None
266 266 e.comment = []
267 267 state = 7
268 268
269 269 elif state == 7:
270 270 # read the revision numbers of branches that start at this revision
271 271 # or store the commit log message otherwise
272 272 m = re_70.match(line)
273 273 if m:
274 274 e.branches = [tuple([int(y) for y in x.strip().split('.')])
275 275 for x in m.group(1).split(';')]
276 276 state = 8
277 277 elif re_31.match(line):
278 278 state = 5
279 279 store = True
280 280 elif re_32.match(line):
281 281 state = 0
282 282 store = True
283 283 else:
284 284 e.comment.append(line)
285 285
286 286 elif state == 8:
287 287 # store commit log message
288 288 if re_31.match(line):
289 289 state = 5
290 290 store = True
291 291 elif re_32.match(line):
292 292 state = 0
293 293 store = True
294 294 else:
295 295 e.comment.append(line)
296 296
297 297 if store:
298 298 # clean up the results and save in the log.
299 299 store = False
300 300 e.tags = util.sort([scache(x) for x in tags.get(e.revision, [])])
301 301 e.comment = scache('\n'.join(e.comment))
302 302
303 303 revn = len(e.revision)
304 304 if revn > 3 and (revn % 2) == 0:
305 305 e.branch = tags.get(e.revision[:-1], [None])[0]
306 306 else:
307 307 e.branch = None
308 308
309 309 log.append(e)
310 310
311 311 if len(log) % 100 == 0:
312 312 ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')
313 313
314 314 listsort(log, key=lambda x:(x.rcs, x.revision))
315 315
316 316 # find parent revisions of individual files
317 317 versions = {}
318 318 for e in log:
319 319 branch = e.revision[:-1]
320 320 p = versions.get((e.rcs, branch), None)
321 321 if p is None:
322 322 p = e.revision[:-2]
323 323 e.parent = p
324 324 versions[(e.rcs, branch)] = e.revision
325 325
326 326 # update the log cache
327 327 if cache:
328 328 if log:
329 329 # join up the old and new logs
330 330 listsort(log, key=lambda x:x.date)
331 331
332 332 if oldlog and oldlog[-1].date >= log[0].date:
333 333 raise logerror('Log cache overlaps with new log entries,'
334 334 ' re-run without cache.')
335 335
336 336 log = oldlog + log
337 337
338 338 # write the new cachefile
339 339 ui.note(_('writing cvs log cache %s\n') % cachefile)
340 340 pickle.dump(log, file(cachefile, 'w'))
341 341 else:
342 342 log = oldlog
343 343
344 344 ui.status(_('%d log entries\n') % len(log))
345 345
346 346 return log
347 347
348 348
class changeset(object):
    '''A group of logentries that form one logical commit.

    Attributes:
    .author   - author name as CVS knows it
    .branch   - name of branch this changeset is on, or None
    .comment  - commit message
    .date     - the commit date as a (time,tz) tuple
    .entries  - list of logentry objects in this changeset
    .parents  - list of one or two parent changesets
    .tags     - list of tags on this changeset
    '''
    def __init__(self, **entries):
        # Attributes are supplied as keyword arguments.
        for name, value in entries.items():
            setattr(self, name, value)
361 361
def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
    '''Convert log into changesets.

    Groups logentry objects whose comment/author/branch match and whose
    dates fall within *fuzz* seconds into changeset objects, links them
    into a parent graph, collects tags, and handles the
    {{mergetobranch ...}} / {{mergefrombranch ...}} markers.
    '''

    ui.status(_('creating changesets\n'))

    # Merge changesets

    listsort(log, key=lambda x:(x.comment, x.author, x.branch, x.date))

    changesets = []
    files = {}
    c = None
    for i, e in enumerate(log):

        # Check if log entry belongs to the current changeset or not.
        if not (c and
                e.comment == c.comment and
                e.author == c.author and
                e.branch == c.branch and
                ((c.date[0] + c.date[1]) <=
                 (e.date[0] + e.date[1]) <=
                 (c.date[0] + c.date[1]) + fuzz) and
                e.file not in files):
            c = changeset(comment=e.comment, author=e.author,
                          branch=e.branch, date=e.date, entries=[])
            changesets.append(c)
            files = {}
            if len(changesets) % 100 == 0:
                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
                ui.status(util.ellipsis(t, 80) + '\n')

        c.entries.append(e)
        files[e.file] = True
        c.date = e.date       # changeset date is date of latest commit in it

    # Sort files in each changeset

    for c in changesets:
        def pathcompare(l, r):
            'Mimic cvsps sorting order'
            l = l.split('/')
            r = r.split('/')
            nl = len(l)
            nr = len(r)
            n = min(nl, nr)
            for i in range(n):
                if i + 1 == nl and nl < nr:
                    return -1
                elif i + 1 == nr and nl > nr:
                    return +1
                elif l[i] < r[i]:
                    return -1
                elif l[i] > r[i]:
                    return +1
            return 0
        def entitycompare(l, r):
            return pathcompare(l.file, r.file)

        c.entries.sort(entitycompare)

    # Sort changesets by date

    def cscmp(l, r):
        d = sum(l.date) - sum(r.date)
        if d:
            return d

        # detect vendor branches and initial commits on a branch
        # (the right-hand dict was previously named 're', shadowing the
        # regex module; renamed for clarity)
        lrevs = {}
        for e in l.entries:
            lrevs[e.rcs] = e.revision
        rrevs = {}
        for e in r.entries:
            rrevs[e.rcs] = e.revision

        d = 0
        for e in l.entries:
            if rrevs.get(e.rcs, None) == e.parent:
                assert not d
                d = 1
                break

        for e in r.entries:
            if lrevs.get(e.rcs, None) == e.parent:
                assert not d
                d = -1
                break

        return d

    changesets.sort(cscmp)

    # Collect tags

    globaltags = {}
    for c in changesets:
        tags = {}
        for e in c.entries:
            for tag in e.tags:
                # remember which is the latest changeset to have this tag
                globaltags[tag] = c

    for c in changesets:
        tags = {}
        for e in c.entries:
            for tag in e.tags:
                tags[tag] = True
        # remember tags only if this is the latest changeset to have it
        c.tags = util.sort([tag for tag in tags if globaltags[tag] is c])

    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
    # by inserting dummy changesets with two parents, and handle
    # {{mergefrombranch BRANCHNAME}} by setting two parents.

    if mergeto is None:
        mergeto = r'{{mergetobranch ([-\w]+)}}'
    if mergeto:
        mergeto = re.compile(mergeto)

    if mergefrom is None:
        mergefrom = r'{{mergefrombranch ([-\w]+)}}'
    if mergefrom:
        mergefrom = re.compile(mergefrom)

    versions = {}    # changeset index where we saw any particular file version
    branches = {}    # changeset index where we saw a branch
    n = len(changesets)
    i = 0
    while i < n:
        c = changesets[i]

        for f in c.entries:
            versions[(f.rcs, f.revision)] = i

        p = None
        if c.branch in branches:
            p = branches[c.branch]
        else:
            for f in c.entries:
                p = max(p, versions.get((f.rcs, f.parent), None))

        c.parents = []
        if p is not None:
            c.parents.append(changesets[p])

        if mergefrom:
            m = mergefrom.search(c.comment)
            if m:
                m = m.group(1)
                if m == 'HEAD':
                    m = None
                if m in branches and c.branch != m:
                    c.parents.append(changesets[branches[m]])

        if mergeto:
            m = mergeto.search(c.comment)
            if m:
                try:
                    m = m.group(1)
                    if m == 'HEAD':
                        m = None
                except IndexError:
                    # user-supplied pattern has no group: merge to HEAD
                    # (was a bare except; group() only raises IndexError here)
                    m = None
                if m in branches and c.branch != m:
                    # insert empty changeset for merge
                    cc = changeset(author=c.author, branch=m, date=c.date,
                            comment='convert-repo: CVS merge from branch %s' % c.branch,
                            entries=[], tags=[], parents=[changesets[branches[m]], c])
                    changesets.insert(i + 1, cc)
                    branches[m] = i + 1

                    # adjust our loop counters now we have inserted a new entry
                    n += 1
                    i += 2
                    continue

        branches[c.branch] = i
        i += 1

    # Number changesets

    for i, c in enumerate(changesets):
        c.id = i + 1

    ui.status(_('%d changeset entries\n') % len(changesets))

    return changesets
@@ -1,126 +1,126 b''
1 1 # darcs support for the convert extension
2 2
3 3 from common import NoRepo, checktool, commandline, commit, converter_source
4 4 from mercurial.i18n import _
5 5 from mercurial import util
6 6 import os, shutil, tempfile
7 7
8 8 # The naming drift of ElementTree is fun!
9 9
10 10 try: from xml.etree.cElementTree import ElementTree
11 11 except ImportError:
12 12 try: from xml.etree.ElementTree import ElementTree
13 13 except ImportError:
14 14 try: from elementtree.cElementTree import ElementTree
15 15 except ImportError:
16 16 try: from elementtree.ElementTree import ElementTree
17 17 except ImportError: ElementTree = None
18 18
19 19
20 20 class darcs_source(converter_source, commandline):
21 21 def __init__(self, ui, path, rev=None):
22 22 converter_source.__init__(self, ui, path, rev=rev)
23 23 commandline.__init__(self, ui, 'darcs')
24 24
25 25 # check for _darcs, ElementTree, _darcs/inventory so that we can
26 26 # easily skip test-convert-darcs if ElementTree is not around
27 27 if not os.path.exists(os.path.join(path, '_darcs')):
28 28 raise NoRepo("%s does not look like a darcs repo" % path)
29 29
30 30 checktool('darcs')
31 31
32 32 if ElementTree is None:
33 33 raise util.Abort(_("Python ElementTree module is not available"))
34 34
35 35 if not os.path.exists(os.path.join(path, '_darcs', 'inventory')):
36 36 raise NoRepo("%s does not look like a darcs repo" % path)
37 37
38 38 self.path = os.path.realpath(path)
39 39
40 40 self.lastrev = None
41 41 self.changes = {}
42 42 self.parents = {}
43 43 self.tags = {}
44 44
45 45 def before(self):
46 46 self.tmppath = tempfile.mkdtemp(
47 47 prefix='convert-' + os.path.basename(self.path) + '-')
48 48 output, status = self.run('init', repodir=self.tmppath)
49 49 self.checkexit(status)
50 50
51 51 tree = self.xml('changes', xml_output=True, summary=True,
52 52 repodir=self.path)
53 53 tagname = None
54 54 child = None
55 55 for elt in tree.findall('patch'):
56 56 node = elt.get('hash')
57 57 name = elt.findtext('name', '')
58 58 if name.startswith('TAG '):
59 59 tagname = name[4:].strip()
60 60 elif tagname is not None:
61 61 self.tags[tagname] = node
62 62 tagname = None
63 63 self.changes[node] = elt
64 64 self.parents[child] = [node]
65 65 child = node
66 66 self.parents[child] = []
67 67
68 68 def after(self):
69 self.ui.debug('cleaning up %s\n' % self.tmppath)
69 self.ui.debug(_('cleaning up %s\n') % self.tmppath)
70 70 shutil.rmtree(self.tmppath, ignore_errors=True)
71 71
72 72 def xml(self, cmd, **kwargs):
73 73 etree = ElementTree()
74 74 fp = self._run(cmd, **kwargs)
75 75 etree.parse(fp)
76 76 self.checkexit(fp.close())
77 77 return etree.getroot()
78 78
79 79 def getheads(self):
80 80 return self.parents[None]
81 81
82 82 def getcommit(self, rev):
83 83 elt = self.changes[rev]
84 84 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
85 85 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
86 86 return commit(author=elt.get('author'), date=util.datestr(date),
87 87 desc=desc.strip(), parents=self.parents[rev])
88 88
89 89 def pull(self, rev):
90 90 output, status = self.run('pull', self.path, all=True,
91 91 match='hash %s' % rev,
92 92 no_test=True, no_posthook=True,
93 93 external_merge='/bin/false',
94 94 repodir=self.tmppath)
95 95 if status:
96 96 if output.find('We have conflicts in') == -1:
97 97 self.checkexit(status, output)
98 98 output, status = self.run('revert', all=True, repodir=self.tmppath)
99 99 self.checkexit(status, output)
100 100
101 101 def getchanges(self, rev):
102 102 self.pull(rev)
103 103 copies = {}
104 104 changes = []
105 105 for elt in self.changes[rev].find('summary').getchildren():
106 106 if elt.tag in ('add_directory', 'remove_directory'):
107 107 continue
108 108 if elt.tag == 'move':
109 109 changes.append((elt.get('from'), rev))
110 110 copies[elt.get('from')] = elt.get('to')
111 111 else:
112 112 changes.append((elt.text.strip(), rev))
113 113 self.lastrev = rev
114 114 return util.sort(changes), copies
115 115
116 116 def getfile(self, name, rev):
117 117 if rev != self.lastrev:
118 118 raise util.Abort(_('internal calling inconsistency'))
119 119 return open(os.path.join(self.tmppath, name), 'rb').read()
120 120
121 121 def getmode(self, name, rev):
122 122 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
123 123 return (mode & 0111) and 'x' or ''
124 124
125 125 def gettags(self):
126 126 return self.tags
@@ -1,317 +1,317 b''
1 1 # hg backend for convert extension
2 2
3 3 # Notes for hg->hg conversion:
4 4 #
5 5 # * Old versions of Mercurial didn't trim the whitespace from the ends
6 6 # of commit messages, but new versions do. Changesets created by
7 7 # those older versions, then converted, may thus have different
8 8 # hashes for changesets that are otherwise identical.
9 9 #
10 10 # * By default, the source revision is stored in the converted
11 11 # revision. This will cause the converted revision to have a
12 12 # different identity than the source. To avoid this, use the
13 13 # following option: "--config convert.hg.saverev=false"
14 14
15 15
16 16 import os, time
17 17 from mercurial.i18n import _
18 18 from mercurial.repo import RepoError
19 19 from mercurial.node import bin, hex, nullid
20 20 from mercurial import hg, revlog, util, context
21 21
22 22 from common import NoRepo, commit, converter_source, converter_sink
23 23
24 24 class mercurial_sink(converter_sink):
25 25 def __init__(self, ui, path):
26 26 converter_sink.__init__(self, ui, path)
27 27 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
28 28 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
29 29 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
30 30 self.lastbranch = None
31 31 if os.path.isdir(path) and len(os.listdir(path)) > 0:
32 32 try:
33 33 self.repo = hg.repository(self.ui, path)
34 34 if not self.repo.local():
35 35 raise NoRepo(_('%s is not a local Mercurial repo') % path)
36 36 except RepoError, err:
37 37 ui.print_exc()
38 38 raise NoRepo(err.args[0])
39 39 else:
40 40 try:
41 41 ui.status(_('initializing destination %s repository\n') % path)
42 42 self.repo = hg.repository(self.ui, path, create=True)
43 43 if not self.repo.local():
44 44 raise NoRepo(_('%s is not a local Mercurial repo') % path)
45 45 self.created.append(path)
46 46 except RepoError, err:
47 47 ui.print_exc()
48 48 raise NoRepo("could not create hg repo %s as sink" % path)
49 49 self.lock = None
50 50 self.wlock = None
51 51 self.filemapmode = False
52 52
53 53 def before(self):
54 54 self.ui.debug(_('run hg sink pre-conversion action\n'))
55 55 self.wlock = self.repo.wlock()
56 56 self.lock = self.repo.lock()
57 57
58 58 def after(self):
59 59 self.ui.debug(_('run hg sink post-conversion action\n'))
60 60 self.lock = None
61 61 self.wlock = None
62 62
63 63 def revmapfile(self):
64 64 return os.path.join(self.path, ".hg", "shamap")
65 65
66 66 def authorfile(self):
67 67 return os.path.join(self.path, ".hg", "authormap")
68 68
69 69 def getheads(self):
70 70 h = self.repo.changelog.heads()
71 71 return [ hex(x) for x in h ]
72 72
73 73 def setbranch(self, branch, pbranches):
74 74 if not self.clonebranches:
75 75 return
76 76
77 77 setbranch = (branch != self.lastbranch)
78 78 self.lastbranch = branch
79 79 if not branch:
80 80 branch = 'default'
81 81 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
82 82 pbranch = pbranches and pbranches[0][1] or 'default'
83 83
84 84 branchpath = os.path.join(self.path, branch)
85 85 if setbranch:
86 86 self.after()
87 87 try:
88 88 self.repo = hg.repository(self.ui, branchpath)
89 89 except:
90 90 self.repo = hg.repository(self.ui, branchpath, create=True)
91 91 self.before()
92 92
93 93 # pbranches may bring revisions from other branches (merge parents)
94 94 # Make sure we have them, or pull them.
95 95 missings = {}
96 96 for b in pbranches:
97 97 try:
98 98 self.repo.lookup(b[0])
99 99 except:
100 100 missings.setdefault(b[1], []).append(b[0])
101 101
102 102 if missings:
103 103 self.after()
104 104 for pbranch, heads in missings.iteritems():
105 105 pbranchpath = os.path.join(self.path, pbranch)
106 106 prepo = hg.repository(self.ui, pbranchpath)
107 107 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
108 108 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
109 109 self.before()
110 110
111 111 def putcommit(self, files, copies, parents, commit, source):
112 112
113 113 files = dict(files)
114 114 def getfilectx(repo, memctx, f):
115 115 v = files[f]
116 116 data = source.getfile(f, v)
117 117 e = source.getmode(f, v)
118 118 return context.memfilectx(f, data, 'l' in e, 'x' in e, copies.get(f))
119 119
120 120 pl = []
121 121 for p in parents:
122 122 if p not in pl:
123 123 pl.append(p)
124 124 parents = pl
125 125 nparents = len(parents)
126 126 if self.filemapmode and nparents == 1:
127 127 m1node = self.repo.changelog.read(bin(parents[0]))[0]
128 128 parent = parents[0]
129 129
130 130 if len(parents) < 2: parents.append("0" * 40)
131 131 if len(parents) < 2: parents.append("0" * 40)
132 132 p2 = parents.pop(0)
133 133
134 134 text = commit.desc
135 135 extra = commit.extra.copy()
136 136 if self.branchnames and commit.branch:
137 137 extra['branch'] = commit.branch
138 138 if commit.rev:
139 139 extra['convert_revision'] = commit.rev
140 140
141 141 while parents:
142 142 p1 = p2
143 143 p2 = parents.pop(0)
144 144 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), getfilectx,
145 145 commit.author, commit.date, extra)
146 146 a = self.repo.commitctx(ctx)
147 147 text = "(octopus merge fixup)\n"
148 148 p2 = hex(self.repo.changelog.tip())
149 149
150 150 if self.filemapmode and nparents == 1:
151 151 man = self.repo.manifest
152 152 mnode = self.repo.changelog.read(bin(p2))[0]
153 153 if not man.cmp(m1node, man.revision(mnode)):
154 154 self.repo.rollback()
155 155 return parent
156 156 return p2
157 157
158 158 def puttags(self, tags):
159 159 try:
160 160 parentctx = self.repo[self.tagsbranch]
161 161 tagparent = parentctx.node()
162 162 except RepoError, inst:
163 163 parentctx = None
164 164 tagparent = nullid
165 165
166 166 try:
167 167 oldlines = util.sort(parentctx['.hgtags'].data().splitlines(1))
168 168 except:
169 169 oldlines = []
170 170
171 171 newlines = util.sort([("%s %s\n" % (tags[tag], tag)) for tag in tags])
172 172
173 173 if newlines == oldlines:
174 174 return None
175 175 data = "".join(newlines)
176 176
177 177 def getfilectx(repo, memctx, f):
178 178 return context.memfilectx(f, data, False, False, None)
179 179
180 self.ui.status("updating tags\n")
180 self.ui.status(_("updating tags\n"))
181 181 date = "%s 0" % int(time.mktime(time.gmtime()))
182 182 extra = {'branch': self.tagsbranch}
183 183 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
184 184 [".hgtags"], getfilectx, "convert-repo", date,
185 185 extra)
186 186 self.repo.commitctx(ctx)
187 187 return hex(self.repo.changelog.tip())
188 188
189 189 def setfilemapmode(self, active):
190 190 self.filemapmode = active
191 191
192 192 class mercurial_source(converter_source):
193 193 def __init__(self, ui, path, rev=None):
194 194 converter_source.__init__(self, ui, path, rev)
195 195 self.saverev = ui.configbool('convert', 'hg.saverev', True)
196 196 try:
197 197 self.repo = hg.repository(self.ui, path)
198 198 # try to provoke an exception if this isn't really a hg
199 199 # repo, but some other bogus compatible-looking url
200 200 if not self.repo.local():
201 201 raise RepoError()
202 202 except RepoError:
203 203 ui.print_exc()
204 204 raise NoRepo("%s is not a local Mercurial repo" % path)
205 205 self.lastrev = None
206 206 self.lastctx = None
207 207 self._changescache = None
208 208 self.convertfp = None
209 209 # Restrict converted revisions to startrev descendants
210 210 startnode = ui.config('convert', 'hg.startrev')
211 211 if startnode is not None:
212 212 try:
213 213 startnode = self.repo.lookup(startnode)
214 214 except repo.RepoError:
215 215 raise util.Abort(_('%s is not a valid start revision')
216 216 % startnode)
217 217 startrev = self.repo.changelog.rev(startnode)
218 218 children = {startnode: 1}
219 219 for rev in self.repo.changelog.descendants(startrev):
220 220 children[self.repo.changelog.node(rev)] = 1
221 221 self.keep = children.__contains__
222 222 else:
223 223 self.keep = util.always
224 224
225 225 def changectx(self, rev):
226 226 if self.lastrev != rev:
227 227 self.lastctx = self.repo[rev]
228 228 self.lastrev = rev
229 229 return self.lastctx
230 230
231 231 def parents(self, ctx):
232 232 return [p.node() for p in ctx.parents()
233 233 if p and self.keep(p.node())]
234 234
235 235 def getheads(self):
236 236 if self.rev:
237 237 heads = [self.repo[self.rev].node()]
238 238 else:
239 239 heads = self.repo.heads()
240 240 return [hex(h) for h in heads if self.keep(h)]
241 241
242 242 def getfile(self, name, rev):
243 243 try:
244 244 return self.changectx(rev)[name].data()
245 245 except revlog.LookupError, err:
246 246 raise IOError(err)
247 247
248 248 def getmode(self, name, rev):
249 249 return self.changectx(rev).manifest().flags(name)
250 250
251 251 def getchanges(self, rev):
252 252 ctx = self.changectx(rev)
253 253 parents = self.parents(ctx)
254 254 if not parents:
255 255 files = util.sort(ctx.manifest().keys())
256 256 return [(f, rev) for f in files], {}
257 257 if self._changescache and self._changescache[0] == rev:
258 258 m, a, r = self._changescache[1]
259 259 else:
260 260 m, a, r = self.repo.status(parents[0], ctx.node())[:3]
261 261 changes = [(name, rev) for name in m + a + r]
262 262 return util.sort(changes), self.getcopies(ctx, m + a)
263 263
264 264 def getcopies(self, ctx, files):
265 265 copies = {}
266 266 for name in files:
267 267 try:
268 268 copynode = ctx.filectx(name).renamed()[0]
269 269 if self.keep(copynode):
270 270 copies[name] = copynode
271 271 except TypeError:
272 272 pass
273 273 return copies
274 274
275 275 def getcommit(self, rev):
276 276 ctx = self.changectx(rev)
277 277 parents = [hex(p) for p in self.parents(ctx)]
278 278 if self.saverev:
279 279 crev = rev
280 280 else:
281 281 crev = None
282 282 return commit(author=ctx.user(), date=util.datestr(ctx.date()),
283 283 desc=ctx.description(), rev=crev, parents=parents,
284 284 branch=ctx.branch(), extra=ctx.extra())
285 285
286 286 def gettags(self):
287 287 tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
288 288 return dict([(name, hex(node)) for name, node in tags
289 289 if self.keep(node)])
290 290
291 291 def getchangedfiles(self, rev, i):
292 292 ctx = self.changectx(rev)
293 293 parents = self.parents(ctx)
294 294 if not parents and i is None:
295 295 i = 0
296 296 changes = [], ctx.manifest().keys(), []
297 297 else:
298 298 i = i or 0
299 299 changes = self.repo.status(parents[i], ctx.node())[:3]
300 300
301 301 if i == 0:
302 302 self._changescache = (rev, changes)
303 303
304 304 return changes[0] + changes[1] + changes[2]
305 305
306 306 def converted(self, rev, destrev):
307 307 if self.convertfp is None:
308 308 self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
309 309 'a')
310 310 self.convertfp.write('%s %s\n' % (destrev, rev))
311 311 self.convertfp.flush()
312 312
313 313 def before(self):
314 314 self.ui.debug(_('run hg source pre-conversion action\n'))
315 315
316 316 def after(self):
317 317 self.ui.debug(_('run hg source post-conversion action\n'))
@@ -1,1161 +1,1161 b''
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4 #
5 5 # Configuration options:
6 6 #
7 7 # convert.svn.trunk
8 8 # Relative path to the trunk (default: "trunk")
9 9 # convert.svn.branches
10 10 # Relative path to tree of branches (default: "branches")
11 11 # convert.svn.tags
12 12 # Relative path to tree of tags (default: "tags")
13 13 #
14 14 # Set these in a hgrc, or on the command line as follows:
15 15 #
16 16 # hg convert --config convert.svn.trunk=wackoname [...]
17 17
18 18 import locale
19 19 import os
20 20 import re
21 21 import sys
22 22 import cPickle as pickle
23 23 import tempfile
24 24
25 25 from mercurial import strutil, util
26 26 from mercurial.i18n import _
27 27
28 28 # Subversion stuff. Works best with very recent Python SVN bindings
29 29 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
30 30 # these bindings.
31 31
32 32 from cStringIO import StringIO
33 33
34 34 from common import NoRepo, commit, converter_source, encodeargs, decodeargs
35 35 from common import commandline, converter_sink, mapfile
36 36
37 37 try:
38 38 from svn.core import SubversionException, Pool
39 39 import svn
40 40 import svn.client
41 41 import svn.core
42 42 import svn.ra
43 43 import svn.delta
44 44 import transport
45 45 except ImportError:
46 46 pass
47 47
48 48 def geturl(path):
49 49 try:
50 50 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
51 51 except SubversionException:
52 52 pass
53 53 if os.path.isdir(path):
54 54 path = os.path.normpath(os.path.abspath(path))
55 55 if os.name == 'nt':
56 56 path = '/' + util.normpath(path)
57 57 return 'file://%s' % path
58 58 return path
59 59
60 60 def optrev(number):
61 61 optrev = svn.core.svn_opt_revision_t()
62 62 optrev.kind = svn.core.svn_opt_revision_number
63 63 optrev.value.number = number
64 64 return optrev
65 65
66 66 class changedpath(object):
67 67 def __init__(self, p):
68 68 self.copyfrom_path = p.copyfrom_path
69 69 self.copyfrom_rev = p.copyfrom_rev
70 70 self.action = p.action
71 71
72 72 def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
73 73 strict_node_history=False):
74 74 protocol = -1
75 75 def receiver(orig_paths, revnum, author, date, message, pool):
76 76 if orig_paths is not None:
77 77 for k, v in orig_paths.iteritems():
78 78 orig_paths[k] = changedpath(v)
79 79 pickle.dump((orig_paths, revnum, author, date, message),
80 80 fp, protocol)
81 81
82 82 try:
83 83 # Use an ra of our own so that our parent can consume
84 84 # our results without confusing the server.
85 85 t = transport.SvnRaTransport(url=url)
86 86 svn.ra.get_log(t.ra, paths, start, end, limit,
87 87 discover_changed_paths,
88 88 strict_node_history,
89 89 receiver)
90 90 except SubversionException, (inst, num):
91 91 pickle.dump(num, fp, protocol)
92 92 except IOError:
93 93 # Caller may interrupt the iteration
94 94 pickle.dump(None, fp, protocol)
95 95 else:
96 96 pickle.dump(None, fp, protocol)
97 97 fp.close()
98 98 # With large history, cleanup process goes crazy and suddenly
99 99 # consumes *huge* amount of memory. The output file being closed,
100 100 # there is no need for clean termination.
101 101 os._exit(0)
102 102
103 103 def debugsvnlog(ui, **opts):
104 104 """Fetch SVN log in a subprocess and channel them back to parent to
105 105 avoid memory collection issues.
106 106 """
107 107 util.set_binary(sys.stdin)
108 108 util.set_binary(sys.stdout)
109 109 args = decodeargs(sys.stdin.read())
110 110 get_log_child(sys.stdout, *args)
111 111
112 112 class logstream:
113 113 """Interruptible revision log iterator."""
114 114 def __init__(self, stdout):
115 115 self._stdout = stdout
116 116
117 117 def __iter__(self):
118 118 while True:
119 119 entry = pickle.load(self._stdout)
120 120 try:
121 121 orig_paths, revnum, author, date, message = entry
122 122 except:
123 123 if entry is None:
124 124 break
125 125 raise SubversionException("child raised exception", entry)
126 126 yield entry
127 127
128 128 def close(self):
129 129 if self._stdout:
130 130 self._stdout.close()
131 131 self._stdout = None
132 132
133 133 # SVN conversion code stolen from bzr-svn and tailor
134 134 #
135 135 # Subversion looks like a versioned filesystem, branches structures
136 136 # are defined by conventions and not enforced by the tool. First,
137 137 # we define the potential branches (modules) as "trunk" and "branches"
138 138 # children directories. Revisions are then identified by their
139 139 # module and revision number (and a repository identifier).
140 140 #
141 141 # The revision graph is really a tree (or a forest). By default, a
142 142 # revision parent is the previous revision in the same module. If the
143 143 # module directory is copied/moved from another module then the
144 144 # revision is the module root and its parent the source revision in
145 145 # the parent module. A revision has at most one parent.
146 146 #
147 147 class svn_source(converter_source):
148 148 def __init__(self, ui, url, rev=None):
149 149 super(svn_source, self).__init__(ui, url, rev=rev)
150 150
151 151 try:
152 152 SubversionException
153 153 except NameError:
154 154 raise NoRepo('Subversion python bindings could not be loaded')
155 155
156 156 self.encoding = locale.getpreferredencoding()
157 157 self.lastrevs = {}
158 158
159 159 latest = None
160 160 try:
161 161 # Support file://path@rev syntax. Useful e.g. to convert
162 162 # deleted branches.
163 163 at = url.rfind('@')
164 164 if at >= 0:
165 165 latest = int(url[at+1:])
166 166 url = url[:at]
167 167 except ValueError, e:
168 168 pass
169 169 self.url = geturl(url)
170 170 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
171 171 try:
172 172 self.transport = transport.SvnRaTransport(url=self.url)
173 173 self.ra = self.transport.ra
174 174 self.ctx = self.transport.client
175 175 self.base = svn.ra.get_repos_root(self.ra)
176 176 # Module is either empty or a repository path starting with
177 177 # a slash and not ending with a slash.
178 178 self.module = self.url[len(self.base):]
179 179 self.prevmodule = None
180 180 self.rootmodule = self.module
181 181 self.commits = {}
182 182 self.paths = {}
183 183 self.uuid = svn.ra.get_uuid(self.ra).decode(self.encoding)
184 184 except SubversionException, e:
185 185 ui.print_exc()
186 186 raise NoRepo("%s does not look like a Subversion repo" % self.url)
187 187
188 188 if rev:
189 189 try:
190 190 latest = int(rev)
191 191 except ValueError:
192 raise util.Abort('svn: revision %s is not an integer' % rev)
192 raise util.Abort(_('svn: revision %s is not an integer') % rev)
193 193
194 194 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
195 195 try:
196 196 self.startrev = int(self.startrev)
197 197 if self.startrev < 0:
198 198 self.startrev = 0
199 199 except ValueError:
200 200 raise util.Abort(_('svn: start revision %s is not an integer')
201 201 % self.startrev)
202 202
203 203 try:
204 204 self.get_blacklist()
205 205 except IOError, e:
206 206 pass
207 207
208 208 self.head = self.latest(self.module, latest)
209 209 if not self.head:
210 210 raise util.Abort(_('no revision found in module %s') %
211 211 self.module.encode(self.encoding))
212 212 self.last_changed = self.revnum(self.head)
213 213
214 214 self._changescache = None
215 215
216 216 if os.path.exists(os.path.join(url, '.svn/entries')):
217 217 self.wc = url
218 218 else:
219 219 self.wc = None
220 220 self.convertfp = None
221 221
222 222 def setrevmap(self, revmap):
223 223 lastrevs = {}
224 224 for revid in revmap.iterkeys():
225 225 uuid, module, revnum = self.revsplit(revid)
226 226 lastrevnum = lastrevs.setdefault(module, revnum)
227 227 if revnum > lastrevnum:
228 228 lastrevs[module] = revnum
229 229 self.lastrevs = lastrevs
230 230
231 231 def exists(self, path, optrev):
232 232 try:
233 233 svn.client.ls(self.url.rstrip('/') + '/' + path,
234 234 optrev, False, self.ctx)
235 235 return True
236 236 except SubversionException, err:
237 237 return False
238 238
239 239 def getheads(self):
240 240
241 241 def isdir(path, revnum):
242 242 kind = self._checkpath(path, revnum)
243 243 return kind == svn.core.svn_node_dir
244 244
245 245 def getcfgpath(name, rev):
246 246 cfgpath = self.ui.config('convert', 'svn.' + name)
247 247 if cfgpath is not None and cfgpath.strip() == '':
248 248 return None
249 249 path = (cfgpath or name).strip('/')
250 250 if not self.exists(path, rev):
251 251 if cfgpath:
252 252 raise util.Abort(_('expected %s to be at %r, but not found')
253 253 % (name, path))
254 254 return None
255 255 self.ui.note(_('found %s at %r\n') % (name, path))
256 256 return path
257 257
258 258 rev = optrev(self.last_changed)
259 259 oldmodule = ''
260 260 trunk = getcfgpath('trunk', rev)
261 261 self.tags = getcfgpath('tags', rev)
262 262 branches = getcfgpath('branches', rev)
263 263
264 264 # If the project has a trunk or branches, we will extract heads
265 265 # from them. We keep the project root otherwise.
266 266 if trunk:
267 267 oldmodule = self.module or ''
268 268 self.module += '/' + trunk
269 269 self.head = self.latest(self.module, self.last_changed)
270 270 if not self.head:
271 271 raise util.Abort(_('no revision found in module %s') %
272 272 self.module.encode(self.encoding))
273 273
274 274 # First head in the list is the module's head
275 275 self.heads = [self.head]
276 276 if self.tags is not None:
277 277 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
278 278
279 279 # Check if branches bring a few more heads to the list
280 280 if branches:
281 281 rpath = self.url.strip('/')
282 282 branchnames = svn.client.ls(rpath + '/' + branches, rev, False,
283 283 self.ctx)
284 284 for branch in branchnames.keys():
285 285 module = '%s/%s/%s' % (oldmodule, branches, branch)
286 286 if not isdir(module, self.last_changed):
287 287 continue
288 288 brevid = self.latest(module, self.last_changed)
289 289 if not brevid:
290 290 self.ui.note(_('ignoring empty branch %s\n') %
291 291 branch.encode(self.encoding))
292 292 continue
293 self.ui.note('found branch %s at %d\n' %
293 self.ui.note(_('found branch %s at %d\n') %
294 294 (branch, self.revnum(brevid)))
295 295 self.heads.append(brevid)
296 296
297 297 if self.startrev and self.heads:
298 298 if len(self.heads) > 1:
299 299 raise util.Abort(_('svn: start revision is not supported with '
300 300 'with more than one branch'))
301 301 revnum = self.revnum(self.heads[0])
302 302 if revnum < self.startrev:
303 303 raise util.Abort(_('svn: no revision found after start revision %d')
304 304 % self.startrev)
305 305
306 306 return self.heads
307 307
308 308 def getfile(self, file, rev):
309 309 data, mode = self._getfile(file, rev)
310 310 self.modecache[(file, rev)] = mode
311 311 return data
312 312
313 313 def getmode(self, file, rev):
314 314 return self.modecache[(file, rev)]
315 315
316 316 def getchanges(self, rev):
317 317 if self._changescache and self._changescache[0] == rev:
318 318 return self._changescache[1]
319 319 self._changescache = None
320 320 self.modecache = {}
321 321 (paths, parents) = self.paths[rev]
322 322 if parents:
323 323 files, copies = self.expandpaths(rev, paths, parents)
324 324 else:
325 325 # Perform a full checkout on roots
326 326 uuid, module, revnum = self.revsplit(rev)
327 327 entries = svn.client.ls(self.base + module, optrev(revnum),
328 328 True, self.ctx)
329 329 files = [n for n,e in entries.iteritems()
330 330 if e.kind == svn.core.svn_node_file]
331 331 copies = {}
332 332
333 333 files.sort()
334 334 files = zip(files, [rev] * len(files))
335 335
336 336 # caller caches the result, so free it here to release memory
337 337 del self.paths[rev]
338 338 return (files, copies)
339 339
340 340 def getchangedfiles(self, rev, i):
341 341 changes = self.getchanges(rev)
342 342 self._changescache = (rev, changes)
343 343 return [f[0] for f in changes[0]]
344 344
345 345 def getcommit(self, rev):
346 346 if rev not in self.commits:
347 347 uuid, module, revnum = self.revsplit(rev)
348 348 self.module = module
349 349 self.reparent(module)
350 350 # We assume that:
351 351 # - requests for revisions after "stop" come from the
352 352 # revision graph backward traversal. Cache all of them
353 353 # down to stop, they will be used eventually.
354 354 # - requests for revisions before "stop" come to get
355 355 # isolated branches parents. Just fetch what is needed.
356 356 stop = self.lastrevs.get(module, 0)
357 357 if revnum < stop:
358 358 stop = revnum + 1
359 359 self._fetch_revisions(revnum, stop)
360 360 commit = self.commits[rev]
361 361 # caller caches the result, so free it here to release memory
362 362 del self.commits[rev]
363 363 return commit
364 364
365 365 def gettags(self):
366 366 tags = {}
367 367 if self.tags is None:
368 368 return tags
369 369
370 370 # svn tags are just a convention, project branches left in a
371 371 # 'tags' directory. There is no other relationship than
372 372 # ancestry, which is expensive to discover and makes them hard
373 373 # to update incrementally. Worse, past revisions may be
374 374 # referenced by tags far away in the future, requiring a deep
375 375 # history traversal on every calculation. Current code
376 376 # performs a single backward traversal, tracking moves within
377 377 # the tags directory (tag renaming) and recording a new tag
378 378 # everytime a project is copied from outside the tags
379 379 # directory. It also lists deleted tags, this behaviour may
380 380 # change in the future.
381 381 pendings = []
382 382 tagspath = self.tags
383 383 start = svn.ra.get_latest_revnum(self.ra)
384 384 try:
385 385 for entry in self._getlog([self.tags], start, self.startrev):
386 386 origpaths, revnum, author, date, message = entry
387 387 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
388 388 in origpaths.iteritems() if e.copyfrom_path]
389 389 copies.sort()
390 390 # Apply moves/copies from more specific to general
391 391 copies.reverse()
392 392
393 393 srctagspath = tagspath
394 394 if copies and copies[-1][2] == tagspath:
395 395 # Track tags directory moves
396 396 srctagspath = copies.pop()[0]
397 397
398 398 for source, sourcerev, dest in copies:
399 399 if not dest.startswith(tagspath + '/'):
400 400 continue
401 401 for tag in pendings:
402 402 if tag[0].startswith(dest):
403 403 tagpath = source + tag[0][len(dest):]
404 404 tag[:2] = [tagpath, sourcerev]
405 405 break
406 406 else:
407 407 pendings.append([source, sourcerev, dest.split('/')[-1]])
408 408
409 409 # Tell tag renamings from tag creations
410 410 remainings = []
411 411 for source, sourcerev, tagname in pendings:
412 412 if source.startswith(srctagspath):
413 413 remainings.append([source, sourcerev, tagname])
414 414 continue
415 415 # From revision may be fake, get one with changes
416 416 tagid = self.latest(source, sourcerev)
417 417 if tagid:
418 418 tags[tagname] = tagid
419 419 pendings = remainings
420 420 tagspath = srctagspath
421 421
422 422 except SubversionException, (inst, num):
423 self.ui.note('no tags found at revision %d\n' % start)
423 self.ui.note(_('no tags found at revision %d\n') % start)
424 424 return tags
425 425
426 426 def converted(self, rev, destrev):
427 427 if not self.wc:
428 428 return
429 429 if self.convertfp is None:
430 430 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
431 431 'a')
432 432 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
433 433 self.convertfp.flush()
434 434
435 435 # -- helper functions --
436 436
437 437 def revid(self, revnum, module=None):
438 438 if not module:
439 439 module = self.module
440 440 return u"svn:%s%s@%s" % (self.uuid, module.decode(self.encoding),
441 441 revnum)
442 442
443 443 def revnum(self, rev):
444 444 return int(rev.split('@')[-1])
445 445
446 446 def revsplit(self, rev):
447 447 url, revnum = rev.encode(self.encoding).split('@', 1)
448 448 revnum = int(revnum)
449 449 parts = url.split('/', 1)
450 450 uuid = parts.pop(0)[4:]
451 451 mod = ''
452 452 if parts:
453 453 mod = '/' + parts[0]
454 454 return uuid, mod, revnum
455 455
456 456 def latest(self, path, stop=0):
457 457 """Find the latest revid affecting path, up to stop. It may return
458 458 a revision in a different module, since a branch may be moved without
459 459 a change being reported. Return None if computed module does not
460 460 belong to rootmodule subtree.
461 461 """
462 462 if not path.startswith(self.rootmodule):
463 463 # Requests on foreign branches may be forbidden at server level
464 464 self.ui.debug(_('ignoring foreign branch %r\n') % path)
465 465 return None
466 466
467 467 if not stop:
468 468 stop = svn.ra.get_latest_revnum(self.ra)
469 469 try:
470 470 prevmodule = self.reparent('')
471 471 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
472 472 self.reparent(prevmodule)
473 473 except SubversionException:
474 474 dirent = None
475 475 if not dirent:
476 raise util.Abort('%s not found up to revision %d' % (path, stop))
476 raise util.Abort(_('%s not found up to revision %d') % (path, stop))
477 477
478 478 # stat() gives us the previous revision on this line of development, but
479 479 # it might be in *another module*. Fetch the log and detect renames down
480 480 # to the latest revision.
481 481 stream = self._getlog([path], stop, dirent.created_rev)
482 482 try:
483 483 for entry in stream:
484 484 paths, revnum, author, date, message = entry
485 485 if revnum <= dirent.created_rev:
486 486 break
487 487
488 488 for p in paths:
489 489 if not path.startswith(p) or not paths[p].copyfrom_path:
490 490 continue
491 491 newpath = paths[p].copyfrom_path + path[len(p):]
492 self.ui.debug("branch renamed from %s to %s at %d\n" %
492 self.ui.debug(_("branch renamed from %s to %s at %d\n") %
493 493 (path, newpath, revnum))
494 494 path = newpath
495 495 break
496 496 finally:
497 497 stream.close()
498 498
499 499 if not path.startswith(self.rootmodule):
500 500 self.ui.debug(_('ignoring foreign branch %r\n') % path)
501 501 return None
502 502 return self.revid(dirent.created_rev, path)
503 503
504 504 def get_blacklist(self):
505 505 """Avoid certain revision numbers.
506 506 It is not uncommon for two nearby revisions to cancel each other
507 507 out, e.g. 'I copied trunk into a subdirectory of itself instead
508 508 of making a branch'. The converted repository is significantly
509 509 smaller if we ignore such revisions."""
510 510 self.blacklist = util.set()
511 511 blacklist = self.blacklist
512 512 for line in file("blacklist.txt", "r"):
513 513 if not line.startswith("#"):
514 514 try:
515 515 svn_rev = int(line.strip())
516 516 blacklist.add(svn_rev)
517 517 except ValueError, e:
518 518 pass # not an integer or a comment
519 519
520 520 def is_blacklisted(self, svn_rev):
521 521 return svn_rev in self.blacklist
522 522
523 523 def reparent(self, module):
524 524 """Reparent the svn transport and return the previous parent."""
525 525 if self.prevmodule == module:
526 526 return module
527 527 svn_url = (self.base + module).encode(self.encoding)
528 528 prevmodule = self.prevmodule
529 529 if prevmodule is None:
530 530 prevmodule = ''
531 self.ui.debug("reparent to %s\n" % svn_url)
531 self.ui.debug(_("reparent to %s\n") % svn_url)
532 532 svn.ra.reparent(self.ra, svn_url)
533 533 self.prevmodule = module
534 534 return prevmodule
535 535
536 536 def expandpaths(self, rev, paths, parents):
537 537 entries = []
538 538 copyfrom = {} # Map of entrypath, revision for finding source of deleted revisions.
539 539 copies = {}
540 540
541 541 new_module, revnum = self.revsplit(rev)[1:]
542 542 if new_module != self.module:
543 543 self.module = new_module
544 544 self.reparent(self.module)
545 545
546 546 for path, ent in paths:
547 547 entrypath = self.getrelpath(path)
548 548 entry = entrypath.decode(self.encoding)
549 549
550 550 kind = self._checkpath(entrypath, revnum)
551 551 if kind == svn.core.svn_node_file:
552 552 entries.append(self.recode(entry))
553 553 if not ent.copyfrom_path or not parents:
554 554 continue
555 555 # Copy sources not in parent revisions cannot be represented,
556 556 # ignore their origin for now
557 557 pmodule, prevnum = self.revsplit(parents[0])[1:]
558 558 if ent.copyfrom_rev < prevnum:
559 559 continue
560 560 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
561 561 if not copyfrom_path:
562 562 continue
563 self.ui.debug("copied to %s from %s@%s\n" %
563 self.ui.debug(_("copied to %s from %s@%s\n") %
564 564 (entrypath, copyfrom_path, ent.copyfrom_rev))
565 565 copies[self.recode(entry)] = self.recode(copyfrom_path)
566 566 elif kind == 0: # gone, but had better be a deleted *file*
567 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
567 self.ui.debug(_("gone from %s\n") % ent.copyfrom_rev)
568 568
569 569 # if a branch is created but entries are removed in the same
570 570 # changeset, get the right fromrev
571 571 # parents cannot be empty here, you cannot remove things from
572 572 # a root revision.
573 573 uuid, old_module, fromrev = self.revsplit(parents[0])
574 574
575 575 basepath = old_module + "/" + self.getrelpath(path)
576 576 entrypath = basepath
577 577
578 578 def lookup_parts(p):
579 579 rc = None
580 580 parts = p.split("/")
581 581 for i in range(len(parts)):
582 582 part = "/".join(parts[:i])
583 583 info = part, copyfrom.get(part, None)
584 584 if info[1] is not None:
585 self.ui.debug("Found parent directory %s\n" % info[1])
585 self.ui.debug(_("Found parent directory %s\n") % info[1])
586 586 rc = info
587 587 return rc
588 588
589 self.ui.debug("base, entry %s %s\n" % (basepath, entrypath))
589 self.ui.debug(_("base, entry %s %s\n") % (basepath, entrypath))
590 590
591 591 frompath, froment = lookup_parts(entrypath) or (None, revnum - 1)
592 592
593 593 # need to remove fragment from lookup_parts and replace with copyfrom_path
594 594 if frompath is not None:
595 self.ui.debug("munge-o-matic\n")
595 self.ui.debug(_("munge-o-matic\n"))
596 596 self.ui.debug(entrypath + '\n')
597 597 self.ui.debug(entrypath[len(frompath):] + '\n')
598 598 entrypath = froment.copyfrom_path + entrypath[len(frompath):]
599 599 fromrev = froment.copyfrom_rev
600 self.ui.debug("Info: %s %s %s %s\n" % (frompath, froment, ent, entrypath))
600 self.ui.debug(_("Info: %s %s %s %s\n") % (frompath, froment, ent, entrypath))
601 601
602 602 # We can avoid the reparent calls if the module has not changed
603 603 # but it probably does not worth the pain.
604 604 prevmodule = self.reparent('')
605 605 fromkind = svn.ra.check_path(self.ra, entrypath.strip('/'), fromrev)
606 606 self.reparent(prevmodule)
607 607
608 608 if fromkind == svn.core.svn_node_file: # a deleted file
609 609 entries.append(self.recode(entry))
610 610 elif fromkind == svn.core.svn_node_dir:
611 611 # print "Deleted/moved non-file:", revnum, path, ent
612 612 # children = self._find_children(path, revnum - 1)
613 613 # print "find children %s@%d from %d action %s" % (path, revnum, ent.copyfrom_rev, ent.action)
614 614 # Sometimes this is tricky. For example: in
615 615 # The Subversion Repository revision 6940 a dir
616 616 # was copied and one of its files was deleted
617 617 # from the new location in the same commit. This
618 618 # code can't deal with that yet.
619 619 if ent.action == 'C':
620 620 children = self._find_children(path, fromrev)
621 621 else:
622 622 oroot = entrypath.strip('/')
623 623 nroot = path.strip('/')
624 624 children = self._find_children(oroot, fromrev)
625 625 children = [s.replace(oroot,nroot) for s in children]
626 626 # Mark all [files, not directories] as deleted.
627 627 for child in children:
628 628 # Can we move a child directory and its
629 629 # parent in the same commit? (probably can). Could
630 630 # cause problems if instead of revnum -1,
631 631 # we have to look in (copyfrom_path, revnum - 1)
632 632 entrypath = self.getrelpath("/" + child, module=old_module)
633 633 if entrypath:
634 634 entry = self.recode(entrypath.decode(self.encoding))
635 635 if entry in copies:
636 636 # deleted file within a copy
637 637 del copies[entry]
638 638 else:
639 639 entries.append(entry)
640 640 else:
641 self.ui.debug('unknown path in revision %d: %s\n' % \
641 self.ui.debug(_('unknown path in revision %d: %s\n') % \
642 642 (revnum, path))
643 643 elif kind == svn.core.svn_node_dir:
644 644 # Should probably synthesize normal file entries
645 645 # and handle as above to clean up copy/rename handling.
646 646
647 647 # If the directory just had a prop change,
648 648 # then we shouldn't need to look for its children.
649 649 if ent.action == 'M':
650 650 continue
651 651
652 652 # Also this could create duplicate entries. Not sure
653 653 # whether this will matter. Maybe should make entries a set.
654 654 # print "Changed directory", revnum, path, ent.action, ent.copyfrom_path, ent.copyfrom_rev
655 655 # This will fail if a directory was copied
656 656 # from another branch and then some of its files
657 657 # were deleted in the same transaction.
658 658 children = util.sort(self._find_children(path, revnum))
659 659 for child in children:
660 660 # Can we move a child directory and its
661 661 # parent in the same commit? (probably can). Could
662 662 # cause problems if instead of revnum -1,
663 663 # we have to look in (copyfrom_path, revnum - 1)
664 664 entrypath = self.getrelpath("/" + child)
665 665 # print child, self.module, entrypath
666 666 if entrypath:
667 667 # Need to filter out directories here...
668 668 kind = self._checkpath(entrypath, revnum)
669 669 if kind != svn.core.svn_node_dir:
670 670 entries.append(self.recode(entrypath))
671 671
672 672 # Copies here (must copy all from source)
673 673 # Probably not a real problem for us if
674 674 # source does not exist
675 675 if not ent.copyfrom_path or not parents:
676 676 continue
677 677 # Copy sources not in parent revisions cannot be represented,
678 678 # ignore their origin for now
679 679 pmodule, prevnum = self.revsplit(parents[0])[1:]
680 680 if ent.copyfrom_rev < prevnum:
681 681 continue
682 682 copyfrompath = ent.copyfrom_path.decode(self.encoding)
683 683 copyfrompath = self.getrelpath(copyfrompath, pmodule)
684 684 if not copyfrompath:
685 685 continue
686 686 copyfrom[path] = ent
687 self.ui.debug("mark %s came from %s:%d\n"
687 self.ui.debug(_("mark %s came from %s:%d\n")
688 688 % (path, copyfrompath, ent.copyfrom_rev))
689 689 children = self._find_children(ent.copyfrom_path, ent.copyfrom_rev)
690 690 children.sort()
691 691 for child in children:
692 692 entrypath = self.getrelpath("/" + child, pmodule)
693 693 if not entrypath:
694 694 continue
695 695 entry = entrypath.decode(self.encoding)
696 696 copytopath = path + entry[len(copyfrompath):]
697 697 copytopath = self.getrelpath(copytopath)
698 698 copies[self.recode(copytopath)] = self.recode(entry, pmodule)
699 699
700 700 return (util.unique(entries), copies)
701 701
702 702 def _fetch_revisions(self, from_revnum, to_revnum):
703 703 if from_revnum < to_revnum:
704 704 from_revnum, to_revnum = to_revnum, from_revnum
705 705
706 706 self.child_cset = None
707 707
708 708 def isdescendantof(parent, child):
709 709 if not child or not parent or not child.startswith(parent):
710 710 return False
711 711 subpath = child[len(parent):]
712 712 return len(subpath) > 1 and subpath[0] == '/'
713 713
714 714 def parselogentry(orig_paths, revnum, author, date, message):
715 715 """Return the parsed commit object or None, and True if
716 716 the revision is a branch root.
717 717 """
718 self.ui.debug("parsing revision %d (%d changes)\n" %
718 self.ui.debug(_("parsing revision %d (%d changes)\n") %
719 719 (revnum, len(orig_paths)))
720 720
721 721 branched = False
722 722 rev = self.revid(revnum)
723 723 # branch log might return entries for a parent we already have
724 724
725 725 if (rev in self.commits or revnum < to_revnum):
726 726 return None, branched
727 727
728 728 parents = []
729 729 # check whether this revision is the start of a branch or part
730 730 # of a branch renaming
731 731 orig_paths = util.sort(orig_paths.items())
732 732 root_paths = [(p,e) for p,e in orig_paths if self.module.startswith(p)]
733 733 if root_paths:
734 734 path, ent = root_paths[-1]
735 735 if ent.copyfrom_path:
736 736 # If dir was moved while one of its file was removed
737 737 # the log may look like:
738 738 # A /dir (from /dir:x)
739 739 # A /dir/a (from /dir/a:y)
740 740 # A /dir/b (from /dir/b:z)
741 741 # ...
742 742 # for all remaining children.
743 743 # Let's take the highest child element from rev as source.
744 744 copies = [(p,e) for p,e in orig_paths[:-1]
745 745 if isdescendantof(ent.copyfrom_path, e.copyfrom_path)]
746 746 fromrev = max([e.copyfrom_rev for p,e in copies] + [ent.copyfrom_rev])
747 747 branched = True
748 748 newpath = ent.copyfrom_path + self.module[len(path):]
749 749 # ent.copyfrom_rev may not be the actual last revision
750 750 previd = self.latest(newpath, fromrev)
751 751 if previd is not None:
752 752 prevmodule, prevnum = self.revsplit(previd)[1:]
753 753 if prevnum >= self.startrev:
754 754 parents = [previd]
755 self.ui.note('found parent of branch %s at %d: %s\n' %
755 self.ui.note(_('found parent of branch %s at %d: %s\n') %
756 756 (self.module, prevnum, prevmodule))
757 757 else:
758 self.ui.debug("No copyfrom path, don't know what to do.\n")
758 self.ui.debug(_("No copyfrom path, don't know what to do.\n"))
759 759
760 760 paths = []
761 761 # filter out unrelated paths
762 762 for path, ent in orig_paths:
763 763 if self.getrelpath(path) is None:
764 764 continue
765 765 paths.append((path, ent))
766 766
767 767 # Example SVN datetime. Includes microseconds.
768 768 # ISO-8601 conformant
769 769 # '2007-01-04T17:35:00.902377Z'
770 770 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
771 771
772 772 log = message and self.recode(message) or ''
773 773 author = author and self.recode(author) or ''
774 774 try:
775 775 branch = self.module.split("/")[-1]
776 776 if branch == 'trunk':
777 777 branch = ''
778 778 except IndexError:
779 779 branch = None
780 780
781 781 cset = commit(author=author,
782 782 date=util.datestr(date),
783 783 desc=log,
784 784 parents=parents,
785 785 branch=branch,
786 786 rev=rev.encode('utf-8'))
787 787
788 788 self.commits[rev] = cset
789 789 # The parents list is *shared* among self.paths and the
790 790 # commit object. Both will be updated below.
791 791 self.paths[rev] = (paths, cset.parents)
792 792 if self.child_cset and not self.child_cset.parents:
793 793 self.child_cset.parents[:] = [rev]
794 794 self.child_cset = cset
795 795 return cset, branched
796 796
797 self.ui.note('fetching revision log for "%s" from %d to %d\n' %
797 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
798 798 (self.module, from_revnum, to_revnum))
799 799
800 800 try:
801 801 firstcset = None
802 802 lastonbranch = False
803 803 stream = self._getlog([self.module], from_revnum, to_revnum)
804 804 try:
805 805 for entry in stream:
806 806 paths, revnum, author, date, message = entry
807 807 if revnum < self.startrev:
808 808 lastonbranch = True
809 809 break
810 810 if self.is_blacklisted(revnum):
811 self.ui.note('skipping blacklisted revision %d\n'
811 self.ui.note(_('skipping blacklisted revision %d\n')
812 812 % revnum)
813 813 continue
814 814 if paths is None:
815 self.ui.debug('revision %d has no entries\n' % revnum)
815 self.ui.debug(_('revision %d has no entries\n') % revnum)
816 816 continue
817 817 cset, lastonbranch = parselogentry(paths, revnum, author,
818 818 date, message)
819 819 if cset:
820 820 firstcset = cset
821 821 if lastonbranch:
822 822 break
823 823 finally:
824 824 stream.close()
825 825
826 826 if not lastonbranch and firstcset and not firstcset.parents:
827 827 # The first revision of the sequence (the last fetched one)
828 828 # has invalid parents if not a branch root. Find the parent
829 829 # revision now, if any.
830 830 try:
831 831 firstrevnum = self.revnum(firstcset.rev)
832 832 if firstrevnum > 1:
833 833 latest = self.latest(self.module, firstrevnum - 1)
834 834 if latest:
835 835 firstcset.parents.append(latest)
836 836 except util.Abort:
837 837 pass
838 838 except SubversionException, (inst, num):
839 839 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
840 raise util.Abort('svn: branch has no revision %s' % to_revnum)
840 raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
841 841 raise
842 842
843 843 def _getfile(self, file, rev):
844 844 io = StringIO()
845 845 # TODO: ra.get_file transmits the whole file instead of diffs.
846 846 mode = ''
847 847 try:
848 848 new_module, revnum = self.revsplit(rev)[1:]
849 849 if self.module != new_module:
850 850 self.module = new_module
851 851 self.reparent(self.module)
852 852 info = svn.ra.get_file(self.ra, file, revnum, io)
853 853 if isinstance(info, list):
854 854 info = info[-1]
855 855 mode = ("svn:executable" in info) and 'x' or ''
856 856 mode = ("svn:special" in info) and 'l' or mode
857 857 except SubversionException, e:
858 858 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
859 859 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
860 860 if e.apr_err in notfound: # File not found
861 861 raise IOError()
862 862 raise
863 863 data = io.getvalue()
864 864 if mode == 'l':
865 865 link_prefix = "link "
866 866 if data.startswith(link_prefix):
867 867 data = data[len(link_prefix):]
868 868 return data, mode
869 869
def _find_children(self, path, revnum):
    """List the entries under path at revnum, as module-relative paths."""
    path = path.strip('/')
    pool = Pool()
    rpath = '/'.join([self.base, path]).strip('/')
    entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
    return ['%s/%s' % (path, name) for name in entries.keys()]
875 875
def getrelpath(self, path, module=None):
    """Return path relative to module ('' if path is module itself),
    or None when path lies outside the tracked tree.

    Given the repository url of this wc, say
    "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
    extract the "entry" portion (a relative path) from what
    svn log --xml says, ie
    "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
    that is to say "tests/PloneTestCase.py"
    """
    if module is None:
        module = self.module
    if path.startswith(module):
        relative = path.rstrip('/')[len(module):]
        if relative.startswith('/'):
            return relative[1:]
        elif relative == '':
            return relative

    # The path is outside our tracked tree...
    # (message marked for translation; the diff artifact that left the
    # old untranslated call alongside this one has been removed)
    self.ui.debug(_('%r is not under %r, ignoring\n') % (path, module))
    return None
895 895
def _checkpath(self, path, revnum):
    """Check the kind of path at revnum via the RA layer."""
    # ra.check_path does not like leading slashes very much, it leads
    # to PROPFIND subversion errors
    stripped = path.strip('/')
    return svn.ra.check_path(self.ra, stripped, revnum)
900 900
def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
            strict_node_history=False):
    """Run 'hg debugsvnlog' in a child process and return a stream of
    decoded log entries read from its stdout."""
    # Normalize path names, svn >= 1.5 only wants paths relative to
    # supplied URL
    def relativize(p):
        if not p.startswith('/'):
            p = self.module + '/' + p
        return p.strip('/')
    relpaths = [relativize(p) for p in paths]

    args = [self.base, relpaths, start, end, limit, discover_changed_paths,
            strict_node_history]
    arg = encodeargs(args)
    hgexe = util.hgexecutable()
    cmd = '%s debugsvnlog' % util.shellquote(hgexe)
    # Binary pipes: the argument blob is base64-pickled data.
    stdin, stdout = os.popen2(cmd, 'b')
    stdin.write(arg)
    stdin.close()
    return logstream(stdout)
919 919
# Hook script installed into freshly created target repositories.
# Subversion rejects revision property changes unless a
# pre-revprop-change hook exists; this one allows exactly the property
# edits the conversion performs (svn:log edits plus the hg:convert-*
# bookkeeping properties) and rejects everything else.
pre_revprop_change = '''#!/bin/sh

REPOS="$1"
REV="$2"
USER="$3"
PROPNAME="$4"
ACTION="$5"

if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi

echo "Changing prohibited revision property" >&2
exit 1
'''
935 935
class svn_sink(converter_sink, commandline):
    """Conversion sink writing changesets into a Subversion repository.

    All modifications go through a private svn working copy and the
    'svn' command line client (provided by the commandline mixin).
    """

    # Extracts the new revision number from 'svn commit' output.
    commit_re = re.compile(r'Committed revision (\d+).', re.M)

    def prerun(self):
        # commandline hook: svn commands must run from inside the wc.
        if self.wc:
            os.chdir(self.wc)

    def postrun(self):
        # commandline hook: restore the directory saved in __init__.
        if self.wc:
            os.chdir(self.cwd)

    def join(self, name):
        """Return the path of name inside the working copy's .svn area."""
        return os.path.join(self.wc, '.svn', name)

    def revmapfile(self):
        # Revision map lives in .svn so it travels with the working copy.
        return self.join('hg-shamap')

    def authorfile(self):
        return self.join('hg-authormap')

    def __init__(self, ui, path):
        """Open or create the target at path.

        path may be an existing working copy, an existing repository, or
        a location where a new repository is created via 'svnadmin
        create' and then checked out next to the current directory.
        """
        converter_sink.__init__(self, ui, path)
        commandline.__init__(self, ui, 'svn')
        self.delete = []        # files queued for 'svn delete'
        self.setexec = []       # files gaining svn:executable
        self.delexec = []       # files losing svn:executable
        self.copies = []        # [source, dest] pairs queued for 'svn copy'
        self.wc = None          # working copy root, set below
        self.cwd = os.getcwd()  # restored by postrun()

        path = os.path.realpath(path)

        created = False
        if os.path.isfile(os.path.join(path, '.svn', 'entries')):
            # path is already a working copy: reuse it.
            self.wc = path
            self.run0('update')
        else:
            wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')

            if os.path.isdir(os.path.dirname(path)):
                if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
                    # No repository at path yet: create one.
                    ui.status(_('initializing svn repo %r\n') %
                              os.path.basename(path))
                    commandline(ui, 'svnadmin').run0('create', path)
                    created = path
                path = util.normpath(path)
                if not path.startswith('/'):
                    path = '/' + path
                path = 'file://' + path

            ui.status(_('initializing svn wc %r\n') % os.path.basename(wcpath))
            self.run0('checkout', path, wcpath)

            self.wc = wcpath
        self.opener = util.opener(self.wc)
        self.wopener = util.opener(self.wc)
        self.childmap = mapfile(ui, self.join('hg-childmap'))
        # is_exec is None when the wc filesystem has no execute bit.
        self.is_exec = util.checkexec(self.wc) and util.is_exec or None

        if created:
            # Install the hook allowing the revision property changes the
            # conversion performs (see pre_revprop_change above).
            hook = os.path.join(created, 'hooks', 'pre-revprop-change')
            fp = open(hook, 'w')
            fp.write(pre_revprop_change)
            fp.close()
            util.set_flags(hook, False, True)

        xport = transport.SvnRaTransport(url=geturl(path))
        self.uuid = svn.ra.get_uuid(xport.ra)

    def wjoin(self, *names):
        """Return a path inside the working copy."""
        return os.path.join(self.wc, *names)

    def putfile(self, filename, flags, data):
        """Write data to filename in the wc, honouring 'l' (symlink) and
        'x' (executable) flags, queuing property changes as needed."""
        if 'l' in flags:
            self.wopener.symlink(data, filename)
        else:
            try:
                # Replacing a symlink with a regular file: drop the link
                # first so the write does not follow it.
                if os.path.islink(self.wjoin(filename)):
                    os.unlink(filename)
            except OSError:
                pass
            self.wopener(filename, 'w').write(data)

            if self.is_exec:
                was_exec = self.is_exec(self.wjoin(filename))
            else:
                # On filesystems not supporting execute-bit, there is no way
                # to know if it is set but asking subversion. Setting it
                # systematically is just as expensive and much simpler.
                was_exec = 'x' not in flags

            util.set_flags(self.wjoin(filename), False, 'x' in flags)
            if was_exec:
                if 'x' not in flags:
                    self.delexec.append(filename)
            else:
                if 'x' in flags:
                    self.setexec.append(filename)

    def _copyfile(self, source, dest):
        # SVN's copy command pukes if the destination file exists, but
        # our copyfile method expects to record a copy that has
        # already occurred. Cross the semantic gap.
        wdest = self.wjoin(dest)
        exists = os.path.exists(wdest)
        if exists:
            # Move the existing destination out of the way, restore after.
            fd, tempname = tempfile.mkstemp(
                prefix='hg-copy-', dir=os.path.dirname(wdest))
            os.close(fd)
            os.unlink(tempname)
            os.rename(wdest, tempname)
        try:
            self.run0('copy', source, dest)
        finally:
            if exists:
                try:
                    os.unlink(wdest)
                except OSError:
                    pass
                os.rename(tempname, wdest)

    def dirs_of(self, files):
        """Return the set of directories containing or equal to files."""
        dirs = util.set()
        for f in files:
            if os.path.isdir(self.wjoin(f)):
                dirs.add(f)
            # Every prefix of f up to a '/' is a parent directory.
            for i in strutil.rfindall(f, '/'):
                dirs.add(f[:i])
        return dirs

    def add_dirs(self, files):
        """'svn add' the not-yet-versioned directories of files; return them."""
        add_dirs = [d for d in util.sort(self.dirs_of(files))
                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
        if add_dirs:
            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
        return add_dirs

    def add_files(self, files):
        """'svn add' the given files; return them."""
        if files:
            self.xargs(files, 'add', quiet=True)
        return files

    def tidy_dirs(self, names):
        """Delete now-empty directories, deepest first; return those deleted."""
        dirs = util.sort(self.dirs_of(names))
        dirs.reverse()
        deleted = []
        for d in dirs:
            wd = self.wjoin(d)
            # NOTE(review): os.listdir() returns a list, so comparing it to
            # the string '.svn' is always False and nothing is ever deleted
            # here -- probably meant == ['.svn']; confirm before changing.
            if os.listdir(wd) == '.svn':
                self.run0('delete', d)
                deleted.append(d)
        return deleted

    def addchild(self, parent, child):
        # Record that source revision parent was converted to svn rev child.
        self.childmap[parent] = child

    def revid(self, rev):
        """Return the stable identifier of svn revision rev in this repo."""
        return u"svn:%s@%s" % (self.uuid, rev)

    def putcommit(self, files, copies, parents, commit, source):
        """Commit files/copies as one svn revision; return its revid.

        If any parent was already converted (present in childmap), the
        corresponding existing svn revision id is returned instead of
        creating a new commit.
        """
        # Apply changes to working copy
        for f, v in files:
            try:
                data = source.getfile(f, v)
            except IOError, inst:
                # File gone in this revision: schedule a deletion.
                self.delete.append(f)
            else:
                e = source.getmode(f, v)
                self.putfile(f, e, data)
                if f in copies:
                    self.copies.append([copies[f], f])
        files = [f[0] for f in files]

        for parent in parents:
            try:
                return self.revid(self.childmap[parent])
            except KeyError:
                pass
        # Queue svn metadata operations in dependency order: adds of
        # directories before copies/deletes before file adds.
        entries = util.set(self.delete)
        files = util.frozenset(files)
        entries.update(self.add_dirs(files.difference(entries)))
        if self.copies:
            for s, d in self.copies:
                self._copyfile(s, d)
            self.copies = []
        if self.delete:
            self.xargs(self.delete, 'delete')
            self.delete = []
        entries.update(self.add_files(files.difference(entries)))
        entries.update(self.tidy_dirs(entries))
        if self.delexec:
            self.xargs(self.delexec, 'propdel', 'svn:executable')
            self.delexec = []
        if self.setexec:
            self.xargs(self.setexec, 'propset', 'svn:executable', '*')
            self.setexec = []

        # Commit message is passed through a temporary file.
        fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
        fp = os.fdopen(fd, 'w')
        fp.write(commit.desc)
        fp.close()
        try:
            output = self.run0('commit',
                               username=util.shortuser(commit.author),
                               file=messagefile,
                               encoding='utf-8')
            try:
                # commit_re.search returns None on unexpected output,
                # hence the AttributeError handler below.
                rev = self.commit_re.search(output).group(1)
            except AttributeError:
                self.ui.warn(_('unexpected svn output:\n'))
                self.ui.warn(output)
                raise util.Abort(_('unable to cope with svn output'))
            if commit.rev:
                # Preserve the source revision id as a revision property.
                self.run('propset', 'hg:convert-rev', commit.rev,
                         revprop=True, revision=rev)
            if commit.branch and commit.branch != 'default':
                self.run('propset', 'hg:convert-branch', commit.branch,
                         revprop=True, revision=rev)
            for parent in parents:
                self.addchild(parent, rev)
            return self.revid(rev)
        finally:
            os.unlink(messagefile)

    def puttags(self, tags):
        # Tag conversion to svn is not implemented.
        self.ui.warn(_('XXX TAGS NOT IMPLEMENTED YET\n'))
General Comments 0
You need to be logged in to leave comments. Login now