convert/svn: handle MissingTool exception from converter_sink class...
Azhagu Selvan SP
r13479:b14ed169 stable
@@ -1,434 +1,436 @@
 # convcmd - convert extension commands definition
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from common import NoRepo, MissingTool, SKIPREV, mapfile
 from cvs import convert_cvs
 from darcs import darcs_source
 from git import convert_git
 from hg import mercurial_source, mercurial_sink
 from subversion import svn_source, svn_sink
 from monotone import monotone_source
 from gnuarch import gnuarch_source
 from bzr import bzr_source
 from p4 import p4_source
 import filemap
 
 import os, shutil
 from mercurial import hg, util, encoding
 from mercurial.i18n import _
 
 orig_encoding = 'ascii'
 
 def recode(s):
     if isinstance(s, unicode):
         return s.encode(orig_encoding, 'replace')
     else:
         return s.decode('utf-8').encode(orig_encoding, 'replace')
 
 source_converters = [
     ('cvs', convert_cvs, 'branchsort'),
     ('git', convert_git, 'branchsort'),
     ('svn', svn_source, 'branchsort'),
     ('hg', mercurial_source, 'sourcesort'),
     ('darcs', darcs_source, 'branchsort'),
     ('mtn', monotone_source, 'branchsort'),
     ('gnuarch', gnuarch_source, 'branchsort'),
     ('bzr', bzr_source, 'branchsort'),
     ('p4', p4_source, 'branchsort'),
     ]
 
 sink_converters = [
     ('hg', mercurial_sink),
     ('svn', svn_sink),
     ]
 
 def convertsource(ui, path, type, rev):
     exceptions = []
     if type and type not in [s[0] for s in source_converters]:
         raise util.Abort(_('%s: invalid source repository type') % type)
     for name, source, sortmode in source_converters:
         try:
             if not type or name == type:
                 return source(ui, path, rev), sortmode
         except (NoRepo, MissingTool), inst:
             exceptions.append(inst)
     if not ui.quiet:
         for inst in exceptions:
             ui.write("%s\n" % inst)
     raise util.Abort(_('%s: missing or unsupported repository') % path)
 
 def convertsink(ui, path, type):
     if type and type not in [s[0] for s in sink_converters]:
         raise util.Abort(_('%s: invalid destination repository type') % type)
     for name, sink in sink_converters:
         try:
             if not type or name == type:
                 return sink(ui, path)
         except NoRepo, inst:
             ui.note(_("convert: %s\n") % inst)
+        except MissingTool, inst:
+            raise util.Abort(_('%s\n') % inst)
     raise util.Abort(_('%s: unknown repository type') % path)
 
 class progresssource(object):
     def __init__(self, ui, source, filecount):
         self.ui = ui
         self.source = source
         self.filecount = filecount
         self.retrieved = 0
 
     def getfile(self, file, rev):
         self.retrieved += 1
         self.ui.progress(_('getting files'), self.retrieved,
                          item=file, total=self.filecount)
         return self.source.getfile(file, rev)
 
     def lookuprev(self, rev):
         return self.source.lookuprev(rev)
 
     def close(self):
         self.ui.progress(_('getting files'), None)
 
 class converter(object):
     def __init__(self, ui, source, dest, revmapfile, opts):
 
         self.source = source
         self.dest = dest
         self.ui = ui
         self.opts = opts
         self.commitcache = {}
         self.authors = {}
         self.authorfile = None
 
         # Record converted revisions persistently: maps source revision
         # ID to target revision ID (both strings). (This is how
         # incremental conversions work.)
         self.map = mapfile(ui, revmapfile)
 
         # Read first the dst author map if any
         authorfile = self.dest.authorfile()
         if authorfile and os.path.exists(authorfile):
             self.readauthormap(authorfile)
         # Extend/Override with new author map if necessary
         if opts.get('authormap'):
             self.readauthormap(opts.get('authormap'))
             self.authorfile = self.dest.authorfile()
 
         self.splicemap = mapfile(ui, opts.get('splicemap'))
         self.branchmap = mapfile(ui, opts.get('branchmap'))
 
     def walktree(self, heads):
         '''Return a mapping that identifies the uncommitted parents of every
         uncommitted changeset.'''
         visit = heads
         known = set()
         parents = {}
         while visit:
             n = visit.pop(0)
             if n in known or n in self.map:
                 continue
             known.add(n)
             self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
             commit = self.cachecommit(n)
             parents[n] = []
             for p in commit.parents:
                 parents[n].append(p)
                 visit.append(p)
         self.ui.progress(_('scanning'), None)
 
         return parents
 
     def toposort(self, parents, sortmode):
         '''Return an ordering such that every uncommitted changeset is
         preceeded by all its uncommitted ancestors.'''
 
         def mapchildren(parents):
             """Return a (children, roots) tuple where 'children' maps parent
             revision identifiers to children ones, and 'roots' is the list of
             revisions without parents. 'parents' must be a mapping of revision
             identifier to its parents ones.
             """
             visit = parents.keys()
             seen = set()
             children = {}
             roots = []
 
             while visit:
                 n = visit.pop(0)
                 if n in seen:
                     continue
                 seen.add(n)
                 # Ensure that nodes without parents are present in the
                 # 'children' mapping.
                 children.setdefault(n, [])
                 hasparent = False
                 for p in parents[n]:
                     if not p in self.map:
                         visit.append(p)
                         hasparent = True
                     children.setdefault(p, []).append(n)
                 if not hasparent:
                     roots.append(n)
 
             return children, roots
 
         # Sort functions are supposed to take a list of revisions which
         # can be converted immediately and pick one
 
         def makebranchsorter():
             """If the previously converted revision has a child in the
             eligible revisions list, pick it. Return the list head
             otherwise. Branch sort attempts to minimize branch
             switching, which is harmful for Mercurial backend
             compression.
             """
             prev = [None]
             def picknext(nodes):
                 next = nodes[0]
                 for n in nodes:
                     if prev[0] in parents[n]:
                         next = n
                         break
                 prev[0] = next
                 return next
             return picknext
 
         def makesourcesorter():
             """Source specific sort."""
             keyfn = lambda n: self.commitcache[n].sortkey
             def picknext(nodes):
                 return sorted(nodes, key=keyfn)[0]
             return picknext
 
         def makedatesorter():
             """Sort revisions by date."""
             dates = {}
             def getdate(n):
                 if n not in dates:
                     dates[n] = util.parsedate(self.commitcache[n].date)
                 return dates[n]
 
             def picknext(nodes):
                 return min([(getdate(n), n) for n in nodes])[1]
 
             return picknext
 
         if sortmode == 'branchsort':
             picknext = makebranchsorter()
         elif sortmode == 'datesort':
             picknext = makedatesorter()
         elif sortmode == 'sourcesort':
             picknext = makesourcesorter()
         else:
             raise util.Abort(_('unknown sort mode: %s') % sortmode)
 
         children, actives = mapchildren(parents)
 
         s = []
         pendings = {}
         while actives:
             n = picknext(actives)
             actives.remove(n)
             s.append(n)
 
             # Update dependents list
             for c in children.get(n, []):
                 if c not in pendings:
                     pendings[c] = [p for p in parents[c] if p not in self.map]
                 try:
                     pendings[c].remove(n)
                 except ValueError:
                     raise util.Abort(_('cycle detected between %s and %s')
                                        % (recode(c), recode(n)))
                 if not pendings[c]:
                     # Parents are converted, node is eligible
                     actives.insert(0, c)
                     pendings[c] = None
 
         if len(s) != len(parents):
             raise util.Abort(_("not all revisions were sorted"))
 
         return s
 
     def writeauthormap(self):
         authorfile = self.authorfile
         if authorfile:
             self.ui.status(_('Writing author map file %s\n') % authorfile)
             ofile = open(authorfile, 'w+')
             for author in self.authors:
                 ofile.write("%s=%s\n" % (author, self.authors[author]))
             ofile.close()
 
     def readauthormap(self, authorfile):
         afile = open(authorfile, 'r')
         for line in afile:
 
             line = line.strip()
             if not line or line.startswith('#'):
                 continue
 
             try:
                 srcauthor, dstauthor = line.split('=', 1)
             except ValueError:
                 msg = _('Ignoring bad line in author map file %s: %s\n')
                 self.ui.warn(msg % (authorfile, line.rstrip()))
                 continue
 
             srcauthor = srcauthor.strip()
             dstauthor = dstauthor.strip()
             if self.authors.get(srcauthor) in (None, dstauthor):
                 msg = _('mapping author %s to %s\n')
                 self.ui.debug(msg % (srcauthor, dstauthor))
                 self.authors[srcauthor] = dstauthor
                 continue
 
             m = _('overriding mapping for author %s, was %s, will be %s\n')
             self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
 
         afile.close()
 
     def cachecommit(self, rev):
         commit = self.source.getcommit(rev)
         commit.author = self.authors.get(commit.author, commit.author)
         commit.branch = self.branchmap.get(commit.branch, commit.branch)
         self.commitcache[rev] = commit
         return commit
 
     def copy(self, rev):
         commit = self.commitcache[rev]
 
         changes = self.source.getchanges(rev)
         if isinstance(changes, basestring):
             if changes == SKIPREV:
                 dest = SKIPREV
             else:
                 dest = self.map[changes]
             self.map[rev] = dest
             return
         files, copies = changes
         pbranches = []
         if commit.parents:
             for prev in commit.parents:
                 if prev not in self.commitcache:
                     self.cachecommit(prev)
                 pbranches.append((self.map[prev],
                                   self.commitcache[prev].branch))
         self.dest.setbranch(commit.branch, pbranches)
         try:
             parents = self.splicemap[rev].replace(',', ' ').split()
             self.ui.status(_('spliced in %s as parents of %s\n') %
                            (parents, rev))
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
             parents = [b[0] for b in pbranches]
         source = progresssource(self.ui, self.source, len(files))
         newnode = self.dest.putcommit(files, copies, parents, commit,
                                       source, self.map)
         source.close()
         self.source.converted(rev, newnode)
         self.map[rev] = newnode
 
     def convert(self, sortmode):
         try:
             self.source.before()
             self.dest.before()
             self.source.setrevmap(self.map)
             self.ui.status(_("scanning source...\n"))
             heads = self.source.getheads()
             parents = self.walktree(heads)
             self.ui.status(_("sorting...\n"))
             t = self.toposort(parents, sortmode)
             num = len(t)
             c = None
 
             self.ui.status(_("converting...\n"))
             for i, c in enumerate(t):
                 num -= 1
                 desc = self.commitcache[c].desc
                 if "\n" in desc:
                     desc = desc.splitlines()[0]
                 # convert log message to local encoding without using
                 # tolocal() because the encoding.encoding convert()
                 # uses is 'utf-8'
                 self.ui.status("%d %s\n" % (num, recode(desc)))
                 self.ui.note(_("source: %s\n") % recode(c))
                 self.ui.progress(_('converting'), i, unit=_('revisions'),
                                  total=len(t))
                 self.copy(c)
             self.ui.progress(_('converting'), None)
 
             tags = self.source.gettags()
             ctags = {}
             for k in tags:
                 v = tags[k]
                 if self.map.get(v, SKIPREV) != SKIPREV:
                     ctags[k] = self.map[v]
 
             if c and ctags:
                 nrev, tagsparent = self.dest.puttags(ctags)
                 if nrev and tagsparent:
                     # write another hash correspondence to override the previous
                     # one so we don't end up with extra tag heads
                     tagsparents = [e for e in self.map.iteritems()
                                    if e[1] == tagsparent]
                     if tagsparents:
                         self.map[tagsparents[0][0]] = nrev
 
             self.writeauthormap()
         finally:
             self.cleanup()
 
     def cleanup(self):
         try:
             self.dest.after()
         finally:
             self.source.after()
         self.map.close()
 
 def convert(ui, src, dest=None, revmapfile=None, **opts):
     global orig_encoding
     orig_encoding = encoding.encoding
     encoding.encoding = 'UTF-8'
 
     # support --authors as an alias for --authormap
     if not opts.get('authormap'):
         opts['authormap'] = opts.get('authors')
 
     if not dest:
         dest = hg.defaultdest(src) + "-hg"
         ui.status(_("assuming destination %s\n") % dest)
 
     destc = convertsink(ui, dest, opts.get('dest_type'))
 
     try:
         srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
                                           opts.get('rev'))
     except Exception:
         for path in destc.created:
             shutil.rmtree(path, True)
         raise
 
     sortmodes = ('branchsort', 'datesort', 'sourcesort')
     sortmode = [m for m in sortmodes if opts.get(m)]
     if len(sortmode) > 1:
         raise util.Abort(_('more than one sort mode specified'))
     sortmode = sortmode and sortmode[0] or defaultsort
     if sortmode == 'sourcesort' and not srcc.hasnativeorder():
         raise util.Abort(_('--sourcesort is not supported by this data source'))
 
     fmap = opts.get('filemap')
     if fmap:
         srcc = filemap.filemap_source(ui, srcc, fmap)
         destc.setfilemapmode(True)
 
     if not revmapfile:
         try:
             revmapfile = destc.revmapfile()
         except:
             revmapfile = os.path.join(destc, "map")
 
     c = converter(ui, srcc, destc, revmapfile, opts)
     c.convert(sortmode)
 
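What the two added lines change, in isolation: before this patch, convertsink() only caught NoRepo, so a MissingTool raised while constructing a sink (for example svn_sink when the Subversion Python bindings are unavailable) propagated out of convertsink() uncaught, typically surfacing as a raw traceback rather than a clean abort message. The sketch below illustrates that control flow only; Abort, NoRepo, MissingTool, and svn_sink_stub are stand-ins for this example, not Mercurial's real classes or API, and the code uses "except ... as" syntax rather than the Python 2 form in the patch.

# Minimal sketch of the convertsink() dispatch pattern after this change.
# All names here are illustrative stand-ins, not the real Mercurial ones.

class Abort(Exception):
    """Stand-in for mercurial.util.Abort: a clean, user-facing error."""

class NoRepo(Exception):
    """A sink raises this when it does not recognize the destination."""

class MissingTool(Exception):
    """A sink raises this when its external tool or bindings are unavailable."""

def svn_sink_stub(path):
    # Pretend the Subversion Python bindings are not installed.
    raise MissingTool("could not load Subversion python bindings")

sink_converters = [('svn', svn_sink_stub)]

def convertsink(path, type=None):
    for name, sink in sink_converters:
        try:
            if not type or name == type:
                return sink(path)
        except NoRepo as inst:
            print("convert: %s" % inst)       # not this sink type, keep looking
        except MissingTool as inst:
            raise Abort("%s" % inst)          # mirrors the two lines this patch adds
    raise Abort("%s: unknown repository type" % path)

try:
    convertsink("https://svn.example.org/repo", "svn")
except Abort as inst:
    # With the patch, the missing-tool condition surfaces as a clean
    # abort message instead of an unhandled MissingTool traceback.
    print("abort: %s" % inst)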