convert: process splicemap in sorted order
Mads Kiilerich
r18372:5965997b default
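The functional change in this revision is a single line in converter.mergesplicemap (line 150 of the file below): the splice map is now iterated as sorted(splicemap) instead of in raw dict order, presumably so that entries are checked, and any "not being converted" warnings or "unknown splice map parent" aborts are emitted, in a stable order across runs. A minimal sketch of the difference, using a hypothetical splicemap dict (the revision identifiers are invented for illustration):

# A hypothetical splice map: child revision id -> new parent revision ids.
splicemap = {'rev-c': ['rev-a', 'rev-b'], 'rev-f': ['rev-e']}

# Before this change: plain dict iteration; in Python 2 the order depends
# on hashing, so messages could come out in a different order on each run.
for c in splicemap:
    print c, splicemap[c]

# After this change: sorted() fixes the processing order and makes the
# conversion output reproducible.
for c in sorted(splicemap):
    print c, splicemap[c]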
@@ -1,470 +1,470 @@
 # convcmd - convert extension commands definition
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms of the
 # GNU General Public License version 2 or any later version.
 
 from common import NoRepo, MissingTool, SKIPREV, mapfile
 from cvs import convert_cvs
 from darcs import darcs_source
 from git import convert_git
 from hg import mercurial_source, mercurial_sink
 from subversion import svn_source, svn_sink
 from monotone import monotone_source
 from gnuarch import gnuarch_source
 from bzr import bzr_source
 from p4 import p4_source
 import filemap, common
 
 import os, shutil
 from mercurial import hg, util, encoding
 from mercurial.i18n import _
 
 orig_encoding = 'ascii'
 
 def recode(s):
     if isinstance(s, unicode):
         return s.encode(orig_encoding, 'replace')
     else:
         return s.decode('utf-8').encode(orig_encoding, 'replace')
 
 source_converters = [
     ('cvs', convert_cvs, 'branchsort'),
     ('git', convert_git, 'branchsort'),
     ('svn', svn_source, 'branchsort'),
     ('hg', mercurial_source, 'sourcesort'),
     ('darcs', darcs_source, 'branchsort'),
     ('mtn', monotone_source, 'branchsort'),
     ('gnuarch', gnuarch_source, 'branchsort'),
     ('bzr', bzr_source, 'branchsort'),
     ('p4', p4_source, 'branchsort'),
     ]
 
 sink_converters = [
     ('hg', mercurial_sink),
     ('svn', svn_sink),
     ]
 
 def convertsource(ui, path, type, rev):
     exceptions = []
     if type and type not in [s[0] for s in source_converters]:
         raise util.Abort(_('%s: invalid source repository type') % type)
     for name, source, sortmode in source_converters:
         try:
             if not type or name == type:
                 return source(ui, path, rev), sortmode
         except (NoRepo, MissingTool), inst:
             exceptions.append(inst)
     if not ui.quiet:
         for inst in exceptions:
             ui.write("%s\n" % inst)
     raise util.Abort(_('%s: missing or unsupported repository') % path)
 
 def convertsink(ui, path, type):
     if type and type not in [s[0] for s in sink_converters]:
         raise util.Abort(_('%s: invalid destination repository type') % type)
     for name, sink in sink_converters:
         try:
             if not type or name == type:
                 return sink(ui, path)
         except NoRepo, inst:
             ui.note(_("convert: %s\n") % inst)
         except MissingTool, inst:
             raise util.Abort('%s\n' % inst)
     raise util.Abort(_('%s: unknown repository type') % path)
 
 class progresssource(object):
     def __init__(self, ui, source, filecount):
         self.ui = ui
         self.source = source
         self.filecount = filecount
         self.retrieved = 0
 
     def getfile(self, file, rev):
         self.retrieved += 1
         self.ui.progress(_('getting files'), self.retrieved,
                          item=file, total=self.filecount)
         return self.source.getfile(file, rev)
 
     def lookuprev(self, rev):
         return self.source.lookuprev(rev)
 
     def close(self):
         self.ui.progress(_('getting files'), None)
 
 class converter(object):
     def __init__(self, ui, source, dest, revmapfile, opts):
 
         self.source = source
         self.dest = dest
         self.ui = ui
         self.opts = opts
         self.commitcache = {}
         self.authors = {}
         self.authorfile = None
 
         # Record converted revisions persistently: maps source revision
         # ID to target revision ID (both strings). (This is how
         # incremental conversions work.)
         self.map = mapfile(ui, revmapfile)
 
         # Read first the dst author map if any
         authorfile = self.dest.authorfile()
         if authorfile and os.path.exists(authorfile):
             self.readauthormap(authorfile)
         # Extend/Override with new author map if necessary
         if opts.get('authormap'):
             self.readauthormap(opts.get('authormap'))
             self.authorfile = self.dest.authorfile()
 
         self.splicemap = common.parsesplicemap(opts.get('splicemap'))
         self.branchmap = mapfile(ui, opts.get('branchmap'))
 
     def walktree(self, heads):
         '''Return a mapping that identifies the uncommitted parents of every
         uncommitted changeset.'''
         visit = heads
         known = set()
         parents = {}
         while visit:
             n = visit.pop(0)
             if n in known or n in self.map:
                 continue
             known.add(n)
             self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
             commit = self.cachecommit(n)
             parents[n] = []
             for p in commit.parents:
                 parents[n].append(p)
                 visit.append(p)
         self.ui.progress(_('scanning'), None)
 
         return parents
 
     def mergesplicemap(self, parents, splicemap):
         """A splicemap redefines child/parent relationships. Check the
         map contains valid revision identifiers and merge the new
         links in the source graph.
         """
-        for c in splicemap:
+        for c in sorted(splicemap):
             if c not in parents:
                 if not self.dest.hascommit(self.map.get(c, c)):
                     # Could be in source but not converted during this run
                     self.ui.warn(_('splice map revision %s is not being '
                                    'converted, ignoring\n') % c)
                 continue
             pc = []
             for p in splicemap[c]:
                 # We do not have to wait for nodes already in dest.
                 if self.dest.hascommit(self.map.get(p, p)):
                     continue
                 # Parent is not in dest and not being converted, not good
                 if p not in parents:
                     raise util.Abort(_('unknown splice map parent: %s') % p)
                 pc.append(p)
             parents[c] = pc
 
     def toposort(self, parents, sortmode):
         '''Return an ordering such that every uncommitted changeset is
         preceded by all its uncommitted ancestors.'''
 
         def mapchildren(parents):
             """Return a (children, roots) tuple where 'children' maps parent
             revision identifiers to children ones, and 'roots' is the list of
             revisions without parents. 'parents' must be a mapping of revision
             identifier to its parents ones.
             """
             visit = parents.keys()
             seen = set()
             children = {}
             roots = []
 
             while visit:
                 n = visit.pop(0)
                 if n in seen:
                     continue
                 seen.add(n)
                 # Ensure that nodes without parents are present in the
                 # 'children' mapping.
                 children.setdefault(n, [])
                 hasparent = False
                 for p in parents[n]:
                     if p not in self.map:
                         visit.append(p)
                         hasparent = True
                     children.setdefault(p, []).append(n)
                 if not hasparent:
                     roots.append(n)
 
             return children, roots
 
         # Sort functions are supposed to take a list of revisions which
         # can be converted immediately and pick one
 
         def makebranchsorter():
             """If the previously converted revision has a child in the
             eligible revisions list, pick it. Return the list head
             otherwise. Branch sort attempts to minimize branch
             switching, which is harmful for Mercurial backend
             compression.
             """
             prev = [None]
             def picknext(nodes):
                 next = nodes[0]
                 for n in nodes:
                     if prev[0] in parents[n]:
                         next = n
                         break
                 prev[0] = next
                 return next
             return picknext
 
         def makesourcesorter():
             """Source specific sort."""
             keyfn = lambda n: self.commitcache[n].sortkey
             def picknext(nodes):
                 return sorted(nodes, key=keyfn)[0]
             return picknext
 
         def makedatesorter():
             """Sort revisions by date."""
             dates = {}
             def getdate(n):
                 if n not in dates:
                     dates[n] = util.parsedate(self.commitcache[n].date)
                 return dates[n]
 
             def picknext(nodes):
                 return min([(getdate(n), n) for n in nodes])[1]
 
             return picknext
 
         if sortmode == 'branchsort':
             picknext = makebranchsorter()
         elif sortmode == 'datesort':
             picknext = makedatesorter()
         elif sortmode == 'sourcesort':
             picknext = makesourcesorter()
         else:
             raise util.Abort(_('unknown sort mode: %s') % sortmode)
 
         children, actives = mapchildren(parents)
 
         s = []
         pendings = {}
         while actives:
             n = picknext(actives)
             actives.remove(n)
             s.append(n)
 
             # Update dependents list
             for c in children.get(n, []):
                 if c not in pendings:
                     pendings[c] = [p for p in parents[c] if p not in self.map]
                 try:
                     pendings[c].remove(n)
                 except ValueError:
                     raise util.Abort(_('cycle detected between %s and %s')
                                        % (recode(c), recode(n)))
                 if not pendings[c]:
                     # Parents are converted, node is eligible
                     actives.insert(0, c)
                     pendings[c] = None
 
         if len(s) != len(parents):
             raise util.Abort(_("not all revisions were sorted"))
 
         return s
 
     def writeauthormap(self):
         authorfile = self.authorfile
         if authorfile:
             self.ui.status(_('writing author map file %s\n') % authorfile)
             ofile = open(authorfile, 'w+')
             for author in self.authors:
                 ofile.write("%s=%s\n" % (author, self.authors[author]))
             ofile.close()
 
     def readauthormap(self, authorfile):
         afile = open(authorfile, 'r')
         for line in afile:
 
             line = line.strip()
             if not line or line.startswith('#'):
                 continue
 
             try:
                 srcauthor, dstauthor = line.split('=', 1)
             except ValueError:
                 msg = _('ignoring bad line in author map file %s: %s\n')
                 self.ui.warn(msg % (authorfile, line.rstrip()))
                 continue
 
             srcauthor = srcauthor.strip()
             dstauthor = dstauthor.strip()
             if self.authors.get(srcauthor) in (None, dstauthor):
                 msg = _('mapping author %s to %s\n')
                 self.ui.debug(msg % (srcauthor, dstauthor))
                 self.authors[srcauthor] = dstauthor
                 continue
 
             m = _('overriding mapping for author %s, was %s, will be %s\n')
             self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
 
         afile.close()
 
     def cachecommit(self, rev):
         commit = self.source.getcommit(rev)
         commit.author = self.authors.get(commit.author, commit.author)
         commit.branch = self.branchmap.get(commit.branch, commit.branch)
         self.commitcache[rev] = commit
         return commit
 
     def copy(self, rev):
         commit = self.commitcache[rev]
 
         changes = self.source.getchanges(rev)
         if isinstance(changes, basestring):
             if changes == SKIPREV:
                 dest = SKIPREV
             else:
                 dest = self.map[changes]
             self.map[rev] = dest
             return
         files, copies = changes
         pbranches = []
         if commit.parents:
             for prev in commit.parents:
                 if prev not in self.commitcache:
                     self.cachecommit(prev)
                 pbranches.append((self.map[prev],
                                   self.commitcache[prev].branch))
         self.dest.setbranch(commit.branch, pbranches)
         try:
             parents = self.splicemap[rev]
             self.ui.status(_('spliced in %s as parents of %s\n') %
                            (parents, rev))
             parents = [self.map.get(p, p) for p in parents]
         except KeyError:
             parents = [b[0] for b in pbranches]
         source = progresssource(self.ui, self.source, len(files))
         newnode = self.dest.putcommit(files, copies, parents, commit,
                                       source, self.map)
         source.close()
         self.source.converted(rev, newnode)
         self.map[rev] = newnode
 
     def convert(self, sortmode):
         try:
             self.source.before()
             self.dest.before()
             self.source.setrevmap(self.map)
             self.ui.status(_("scanning source...\n"))
             heads = self.source.getheads()
             parents = self.walktree(heads)
             self.mergesplicemap(parents, self.splicemap)
             self.ui.status(_("sorting...\n"))
             t = self.toposort(parents, sortmode)
             num = len(t)
             c = None
 
             self.ui.status(_("converting...\n"))
             for i, c in enumerate(t):
                 num -= 1
                 desc = self.commitcache[c].desc
                 if "\n" in desc:
                     desc = desc.splitlines()[0]
                 # convert log message to local encoding without using
                 # tolocal() because the encoding.encoding convert()
                 # uses is 'utf-8'
                 self.ui.status("%d %s\n" % (num, recode(desc)))
                 self.ui.note(_("source: %s\n") % recode(c))
                 self.ui.progress(_('converting'), i, unit=_('revisions'),
                                  total=len(t))
                 self.copy(c)
             self.ui.progress(_('converting'), None)
 
             tags = self.source.gettags()
             ctags = {}
             for k in tags:
                 v = tags[k]
                 if self.map.get(v, SKIPREV) != SKIPREV:
                     ctags[k] = self.map[v]
 
             if c and ctags:
                 nrev, tagsparent = self.dest.puttags(ctags)
                 if nrev and tagsparent:
                     # write another hash correspondence to override the previous
                     # one so we don't end up with extra tag heads
                     tagsparents = [e for e in self.map.iteritems()
                                    if e[1] == tagsparent]
                     if tagsparents:
                         self.map[tagsparents[0][0]] = nrev
 
             bookmarks = self.source.getbookmarks()
             cbookmarks = {}
             for k in bookmarks:
                 v = bookmarks[k]
                 if self.map.get(v, SKIPREV) != SKIPREV:
                     cbookmarks[k] = self.map[v]
 
             if c and cbookmarks:
                 self.dest.putbookmarks(cbookmarks)
 
             self.writeauthormap()
         finally:
             self.cleanup()
 
     def cleanup(self):
         try:
             self.dest.after()
         finally:
             self.source.after()
         self.map.close()
 
 def convert(ui, src, dest=None, revmapfile=None, **opts):
     global orig_encoding
     orig_encoding = encoding.encoding
     encoding.encoding = 'UTF-8'
 
     # support --authors as an alias for --authormap
     if not opts.get('authormap'):
         opts['authormap'] = opts.get('authors')
 
     if not dest:
         dest = hg.defaultdest(src) + "-hg"
         ui.status(_("assuming destination %s\n") % dest)
 
     destc = convertsink(ui, dest, opts.get('dest_type'))
 
     try:
         srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
                                           opts.get('rev'))
     except Exception:
         for path in destc.created:
             shutil.rmtree(path, True)
         raise
 
     sortmodes = ('branchsort', 'datesort', 'sourcesort')
     sortmode = [m for m in sortmodes if opts.get(m)]
     if len(sortmode) > 1:
         raise util.Abort(_('more than one sort mode specified'))
     sortmode = sortmode and sortmode[0] or defaultsort
     if sortmode == 'sourcesort' and not srcc.hasnativeorder():
         raise util.Abort(_('--sourcesort is not supported by this data source'))
 
     fmap = opts.get('filemap')
     if fmap:
         srcc = filemap.filemap_source(ui, srcc, fmap)
         destc.setfilemapmode(True)
 
     if not revmapfile:
         try:
             revmapfile = destc.revmapfile()
         except Exception:
             revmapfile = os.path.join(destc, "map")
 
     c = converter(ui, srcc, destc, revmapfile, opts)
     c.convert(sortmode)
 
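For context on the splice map being iterated above: it is produced by common.parsesplicemap from the file passed to 'hg convert --splicemap', and maps a child revision identifier to the one or two parent identifiers to splice in; mergesplicemap then validates those identifiers and copy() applies them as the revision's new parents. The sketch below is not the real parsesplicemap, only an illustrative stand-in that assumes the commonly documented 'child parent1[, parent2]' line format and omits the quoting and error handling the extension actually performs:

def parse_splicemap_sketch(path):
    # Illustrative stand-in for common.parsesplicemap (not the real code).
    # Assumes each non-blank line is: <child> <parent1>[, <parent2>]
    # and returns {child: [parents...]}, the shape mergesplicemap expects.
    m = {}
    fp = open(path, 'r')
    try:
        for line in fp:
            line = line.strip()
            if not line:
                continue
            child, parents = line.split(None, 1)
            m[child] = parents.replace(',', ' ').split()
    finally:
        fp.close()
    return m

A conversion would then be run as, for example, 'hg convert --splicemap splicemap.txt SOURCE DEST-hg' (file and repository names here are placeholders).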