convert: add bookmark support to main command...
Edouard Gomez
r13745:9ff22f60 default
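
The change itself is small: after tags are converted, the source's getbookmarks() result (a mapping of bookmark name to source revision id) is filtered to drop bookmarks whose target was skipped or never converted, remapped through the revision map, and handed to the sink via putbookmarks(). A minimal standalone sketch of that remapping step, with invented sample data for illustration (the helper name and the SKIPREV stand-in below are not part of the commit):

# Sketch of the bookmark remapping this commit adds (sample data is invented).
SKIPREV = 'SKIP'  # stand-in for the convert extension's "skipped revision" sentinel

def remap_bookmarks(bookmarks, revmap):
    """Keep bookmarks whose source revision was actually converted and
    point them at the corresponding destination revision."""
    converted = {}
    for name, srcrev in bookmarks.items():
        if revmap.get(srcrev, SKIPREV) != SKIPREV:
            converted[name] = revmap[srcrev]
    return converted

# Hypothetical example: 'feature' was converted, 'wip' was skipped.
bookmarks = {'feature': 'src-rev-a', 'wip': 'src-rev-b'}
revmap = {'src-rev-a': 'dst-rev-1', 'src-rev-b': SKIPREV}
print(remap_bookmarks(bookmarks, revmap))   # {'feature': 'dst-rev-1'}

Like the tag handling just above it in the diff, the new block only runs when at least one changeset was converted in this pass (the `c and cbookmarks` guard).
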
@@ -1,436 +1,446 @@
# convcmd - convert extension commands definition
#
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from common import NoRepo, MissingTool, SKIPREV, mapfile
from cvs import convert_cvs
from darcs import darcs_source
from git import convert_git
from hg import mercurial_source, mercurial_sink
from subversion import svn_source, svn_sink
from monotone import monotone_source
from gnuarch import gnuarch_source
from bzr import bzr_source
from p4 import p4_source
import filemap

import os, shutil
from mercurial import hg, util, encoding
from mercurial.i18n import _

orig_encoding = 'ascii'

def recode(s):
    if isinstance(s, unicode):
        return s.encode(orig_encoding, 'replace')
    else:
        return s.decode('utf-8').encode(orig_encoding, 'replace')

source_converters = [
    ('cvs', convert_cvs, 'branchsort'),
    ('git', convert_git, 'branchsort'),
    ('svn', svn_source, 'branchsort'),
    ('hg', mercurial_source, 'sourcesort'),
    ('darcs', darcs_source, 'branchsort'),
    ('mtn', monotone_source, 'branchsort'),
    ('gnuarch', gnuarch_source, 'branchsort'),
    ('bzr', bzr_source, 'branchsort'),
    ('p4', p4_source, 'branchsort'),
    ]

sink_converters = [
    ('hg', mercurial_sink),
    ('svn', svn_sink),
    ]

def convertsource(ui, path, type, rev):
    exceptions = []
    if type and type not in [s[0] for s in source_converters]:
        raise util.Abort(_('%s: invalid source repository type') % type)
    for name, source, sortmode in source_converters:
        try:
            if not type or name == type:
                return source(ui, path, rev), sortmode
        except (NoRepo, MissingTool), inst:
            exceptions.append(inst)
    if not ui.quiet:
        for inst in exceptions:
            ui.write("%s\n" % inst)
    raise util.Abort(_('%s: missing or unsupported repository') % path)

def convertsink(ui, path, type):
    if type and type not in [s[0] for s in sink_converters]:
        raise util.Abort(_('%s: invalid destination repository type') % type)
    for name, sink in sink_converters:
        try:
            if not type or name == type:
                return sink(ui, path)
        except NoRepo, inst:
            ui.note(_("convert: %s\n") % inst)
        except MissingTool, inst:
            raise util.Abort('%s\n' % inst)
    raise util.Abort(_('%s: unknown repository type') % path)

class progresssource(object):
    def __init__(self, ui, source, filecount):
        self.ui = ui
        self.source = source
        self.filecount = filecount
        self.retrieved = 0

    def getfile(self, file, rev):
        self.retrieved += 1
        self.ui.progress(_('getting files'), self.retrieved,
                         item=file, total=self.filecount)
        return self.source.getfile(file, rev)

    def lookuprev(self, rev):
        return self.source.lookuprev(rev)

    def close(self):
        self.ui.progress(_('getting files'), None)

class converter(object):
    def __init__(self, ui, source, dest, revmapfile, opts):

        self.source = source
        self.dest = dest
        self.ui = ui
        self.opts = opts
        self.commitcache = {}
        self.authors = {}
        self.authorfile = None

        # Record converted revisions persistently: maps source revision
        # ID to target revision ID (both strings). (This is how
        # incremental conversions work.)
        self.map = mapfile(ui, revmapfile)

        # Read first the dst author map if any
        authorfile = self.dest.authorfile()
        if authorfile and os.path.exists(authorfile):
            self.readauthormap(authorfile)
        # Extend/Override with new author map if necessary
        if opts.get('authormap'):
            self.readauthormap(opts.get('authormap'))
            self.authorfile = self.dest.authorfile()

        self.splicemap = mapfile(ui, opts.get('splicemap'))
        self.branchmap = mapfile(ui, opts.get('branchmap'))

    def walktree(self, heads):
        '''Return a mapping that identifies the uncommitted parents of every
        uncommitted changeset.'''
        visit = heads
        known = set()
        parents = {}
        while visit:
            n = visit.pop(0)
            if n in known or n in self.map:
                continue
            known.add(n)
            self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
            commit = self.cachecommit(n)
            parents[n] = []
            for p in commit.parents:
                parents[n].append(p)
                visit.append(p)
        self.ui.progress(_('scanning'), None)

        return parents

    def toposort(self, parents, sortmode):
        '''Return an ordering such that every uncommitted changeset is
        preceded by all its uncommitted ancestors.'''

        def mapchildren(parents):
            """Return a (children, roots) tuple where 'children' maps parent
            revision identifiers to children ones, and 'roots' is the list of
            revisions without parents. 'parents' must be a mapping of revision
            identifier to its parents ones.
            """
            visit = parents.keys()
            seen = set()
            children = {}
            roots = []

            while visit:
                n = visit.pop(0)
                if n in seen:
                    continue
                seen.add(n)
                # Ensure that nodes without parents are present in the
                # 'children' mapping.
                children.setdefault(n, [])
                hasparent = False
                for p in parents[n]:
                    if not p in self.map:
                        visit.append(p)
                        hasparent = True
                    children.setdefault(p, []).append(n)
                if not hasparent:
                    roots.append(n)

            return children, roots

        # Sort functions are supposed to take a list of revisions which
        # can be converted immediately and pick one

        def makebranchsorter():
            """If the previously converted revision has a child in the
            eligible revisions list, pick it. Return the list head
            otherwise. Branch sort attempts to minimize branch
            switching, which is harmful for Mercurial backend
            compression.
            """
            prev = [None]
            def picknext(nodes):
                next = nodes[0]
                for n in nodes:
                    if prev[0] in parents[n]:
                        next = n
                        break
                prev[0] = next
                return next
            return picknext

        def makesourcesorter():
            """Source specific sort."""
            keyfn = lambda n: self.commitcache[n].sortkey
            def picknext(nodes):
                return sorted(nodes, key=keyfn)[0]
            return picknext

        def makedatesorter():
            """Sort revisions by date."""
            dates = {}
            def getdate(n):
                if n not in dates:
                    dates[n] = util.parsedate(self.commitcache[n].date)
                return dates[n]

            def picknext(nodes):
                return min([(getdate(n), n) for n in nodes])[1]

            return picknext

        if sortmode == 'branchsort':
            picknext = makebranchsorter()
        elif sortmode == 'datesort':
            picknext = makedatesorter()
        elif sortmode == 'sourcesort':
            picknext = makesourcesorter()
        else:
            raise util.Abort(_('unknown sort mode: %s') % sortmode)

        children, actives = mapchildren(parents)

        s = []
        pendings = {}
        while actives:
            n = picknext(actives)
            actives.remove(n)
            s.append(n)

            # Update dependents list
            for c in children.get(n, []):
                if c not in pendings:
                    pendings[c] = [p for p in parents[c] if p not in self.map]
                try:
                    pendings[c].remove(n)
                except ValueError:
                    raise util.Abort(_('cycle detected between %s and %s')
                                       % (recode(c), recode(n)))
                if not pendings[c]:
                    # Parents are converted, node is eligible
                    actives.insert(0, c)
                    pendings[c] = None

        if len(s) != len(parents):
            raise util.Abort(_("not all revisions were sorted"))

        return s

    def writeauthormap(self):
        authorfile = self.authorfile
        if authorfile:
            self.ui.status(_('Writing author map file %s\n') % authorfile)
            ofile = open(authorfile, 'w+')
            for author in self.authors:
                ofile.write("%s=%s\n" % (author, self.authors[author]))
            ofile.close()

    def readauthormap(self, authorfile):
        afile = open(authorfile, 'r')
        for line in afile:

            line = line.strip()
            if not line or line.startswith('#'):
                continue

            try:
                srcauthor, dstauthor = line.split('=', 1)
            except ValueError:
                msg = _('Ignoring bad line in author map file %s: %s\n')
                self.ui.warn(msg % (authorfile, line.rstrip()))
                continue

            srcauthor = srcauthor.strip()
            dstauthor = dstauthor.strip()
            if self.authors.get(srcauthor) in (None, dstauthor):
                msg = _('mapping author %s to %s\n')
                self.ui.debug(msg % (srcauthor, dstauthor))
                self.authors[srcauthor] = dstauthor
                continue

            m = _('overriding mapping for author %s, was %s, will be %s\n')
            self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))

        afile.close()

    def cachecommit(self, rev):
        commit = self.source.getcommit(rev)
        commit.author = self.authors.get(commit.author, commit.author)
        commit.branch = self.branchmap.get(commit.branch, commit.branch)
        self.commitcache[rev] = commit
        return commit

    def copy(self, rev):
        commit = self.commitcache[rev]

        changes = self.source.getchanges(rev)
        if isinstance(changes, basestring):
            if changes == SKIPREV:
                dest = SKIPREV
            else:
                dest = self.map[changes]
            self.map[rev] = dest
            return
        files, copies = changes
        pbranches = []
        if commit.parents:
            for prev in commit.parents:
                if prev not in self.commitcache:
                    self.cachecommit(prev)
                pbranches.append((self.map[prev],
                                  self.commitcache[prev].branch))
        self.dest.setbranch(commit.branch, pbranches)
        try:
            parents = self.splicemap[rev].replace(',', ' ').split()
            self.ui.status(_('spliced in %s as parents of %s\n') %
                           (parents, rev))
            parents = [self.map.get(p, p) for p in parents]
        except KeyError:
            parents = [b[0] for b in pbranches]
        source = progresssource(self.ui, self.source, len(files))
        newnode = self.dest.putcommit(files, copies, parents, commit,
                                      source, self.map)
        source.close()
        self.source.converted(rev, newnode)
        self.map[rev] = newnode

    def convert(self, sortmode):
        try:
            self.source.before()
            self.dest.before()
            self.source.setrevmap(self.map)
            self.ui.status(_("scanning source...\n"))
            heads = self.source.getheads()
            parents = self.walktree(heads)
            self.ui.status(_("sorting...\n"))
            t = self.toposort(parents, sortmode)
            num = len(t)
            c = None

            self.ui.status(_("converting...\n"))
            for i, c in enumerate(t):
                num -= 1
                desc = self.commitcache[c].desc
                if "\n" in desc:
                    desc = desc.splitlines()[0]
                # convert log message to local encoding without using
                # tolocal() because the encoding.encoding convert()
                # uses is 'utf-8'
                self.ui.status("%d %s\n" % (num, recode(desc)))
                self.ui.note(_("source: %s\n") % recode(c))
                self.ui.progress(_('converting'), i, unit=_('revisions'),
                                 total=len(t))
                self.copy(c)
            self.ui.progress(_('converting'), None)

            tags = self.source.gettags()
            ctags = {}
            for k in tags:
                v = tags[k]
                if self.map.get(v, SKIPREV) != SKIPREV:
                    ctags[k] = self.map[v]

            if c and ctags:
                nrev, tagsparent = self.dest.puttags(ctags)
                if nrev and tagsparent:
                    # write another hash correspondence to override the previous
                    # one so we don't end up with extra tag heads
                    tagsparents = [e for e in self.map.iteritems()
                                   if e[1] == tagsparent]
                    if tagsparents:
                        self.map[tagsparents[0][0]] = nrev

+            bookmarks = self.source.getbookmarks()
+            cbookmarks = {}
+            for k in bookmarks:
+                v = bookmarks[k]
+                if self.map.get(v, SKIPREV) != SKIPREV:
+                    cbookmarks[k] = self.map[v]
+
+            if c and cbookmarks:
+                self.dest.putbookmarks(cbookmarks)
+
            self.writeauthormap()
        finally:
            self.cleanup()

    def cleanup(self):
        try:
            self.dest.after()
        finally:
            self.source.after()
        self.map.close()

def convert(ui, src, dest=None, revmapfile=None, **opts):
    global orig_encoding
    orig_encoding = encoding.encoding
    encoding.encoding = 'UTF-8'

    # support --authors as an alias for --authormap
    if not opts.get('authormap'):
        opts['authormap'] = opts.get('authors')

    if not dest:
        dest = hg.defaultdest(src) + "-hg"
        ui.status(_("assuming destination %s\n") % dest)

    destc = convertsink(ui, dest, opts.get('dest_type'))

    try:
        srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
                                          opts.get('rev'))
    except Exception:
        for path in destc.created:
            shutil.rmtree(path, True)
        raise

    sortmodes = ('branchsort', 'datesort', 'sourcesort')
    sortmode = [m for m in sortmodes if opts.get(m)]
    if len(sortmode) > 1:
        raise util.Abort(_('more than one sort mode specified'))
    sortmode = sortmode and sortmode[0] or defaultsort
    if sortmode == 'sourcesort' and not srcc.hasnativeorder():
        raise util.Abort(_('--sourcesort is not supported by this data source'))

    fmap = opts.get('filemap')
    if fmap:
        srcc = filemap.filemap_source(ui, srcc, fmap)
        destc.setfilemapmode(True)

    if not revmapfile:
        try:
            revmapfile = destc.revmapfile()
        except:
            revmapfile = os.path.join(destc, "map")

    c = converter(ui, srcc, destc, revmapfile, opts)
    c.convert(sortmode)