convcmd: pass encoding name as a sysstr...
Augie Fackler
r36150:6df206ef default
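This changeset touches two spots in hgext/convert/convcmd.py: it adds pycompat to the mercurial imports and wraps orig_encoding in pycompat.sysstr() inside recode(). On Python 3 the codec name passed to encode() must be a native str, while Mercurial keeps encoding names such as orig_encoding as bytes; pycompat.sysstr() bridges that gap and leaves Python 2 behaviour unchanged. A minimal standalone sketch of the problem being fixed (the sysstr() below is a simplified stand-in, not Mercurial's implementation):

    # Why the codec name must be a native str on Python 3.
    def sysstr(s):
        # simplified stand-in for mercurial.pycompat.sysstr
        return s.decode('latin-1') if isinstance(s, bytes) else s

    orig_encoding = b'ascii'   # Mercurial-style: encoding names are bytes

    text = u'caf\xe9'
    # text.encode(orig_encoding, 'replace')   # TypeError on Python 3
    print(text.encode(sysstr(orig_encoding), 'replace'))   # b'caf?'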
@@ -1,614 +1,616 @@
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import collections
9 import collections
10 import os
10 import os
11 import shlex
11 import shlex
12 import shutil
12 import shutil
13
13
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15 from mercurial import (
15 from mercurial import (
16 encoding,
16 encoding,
17 error,
17 error,
18 hg,
18 hg,
   19 + pycompat,
19 20   scmutil,
20 21   util,
21 22   )
22
23
23 from . import (
24 from . import (
24 bzr,
25 bzr,
25 common,
26 common,
26 cvs,
27 cvs,
27 darcs,
28 darcs,
28 filemap,
29 filemap,
29 git,
30 git,
30 gnuarch,
31 gnuarch,
31 hg as hgconvert,
32 hg as hgconvert,
32 monotone,
33 monotone,
33 p4,
34 p4,
34 subversion,
35 subversion,
35 )
36 )
36
37
37 mapfile = common.mapfile
38 mapfile = common.mapfile
38 MissingTool = common.MissingTool
39 MissingTool = common.MissingTool
39 NoRepo = common.NoRepo
40 NoRepo = common.NoRepo
40 SKIPREV = common.SKIPREV
41 SKIPREV = common.SKIPREV
41
42
42 bzr_source = bzr.bzr_source
43 bzr_source = bzr.bzr_source
43 convert_cvs = cvs.convert_cvs
44 convert_cvs = cvs.convert_cvs
44 convert_git = git.convert_git
45 convert_git = git.convert_git
45 darcs_source = darcs.darcs_source
46 darcs_source = darcs.darcs_source
46 gnuarch_source = gnuarch.gnuarch_source
47 gnuarch_source = gnuarch.gnuarch_source
47 mercurial_sink = hgconvert.mercurial_sink
48 mercurial_sink = hgconvert.mercurial_sink
48 mercurial_source = hgconvert.mercurial_source
49 mercurial_source = hgconvert.mercurial_source
49 monotone_source = monotone.monotone_source
50 monotone_source = monotone.monotone_source
50 p4_source = p4.p4_source
51 p4_source = p4.p4_source
51 svn_sink = subversion.svn_sink
52 svn_sink = subversion.svn_sink
52 svn_source = subversion.svn_source
53 svn_source = subversion.svn_source
53
54
54 orig_encoding = 'ascii'
55 orig_encoding = 'ascii'
55
56
56 57   def recode(s):
57 58   if isinstance(s, unicode):
58    - return s.encode(orig_encoding, 'replace')
   59 + return s.encode(pycompat.sysstr(orig_encoding), 'replace')
59 60   else:
60    - return s.decode('utf-8').encode(orig_encoding, 'replace')
   61 + return s.decode('utf-8').encode(
   62 + pycompat.sysstr(orig_encoding), 'replace')
61
63
62 def mapbranch(branch, branchmap):
64 def mapbranch(branch, branchmap):
63 '''
65 '''
64 >>> bmap = {b'default': b'branch1'}
66 >>> bmap = {b'default': b'branch1'}
65 >>> for i in [b'', None]:
67 >>> for i in [b'', None]:
66 ... mapbranch(i, bmap)
68 ... mapbranch(i, bmap)
67 'branch1'
69 'branch1'
68 'branch1'
70 'branch1'
69 >>> bmap = {b'None': b'branch2'}
71 >>> bmap = {b'None': b'branch2'}
70 >>> for i in [b'', None]:
72 >>> for i in [b'', None]:
71 ... mapbranch(i, bmap)
73 ... mapbranch(i, bmap)
72 'branch2'
74 'branch2'
73 'branch2'
75 'branch2'
74 >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
76 >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
75 >>> for i in [b'None', b'', None, b'default', b'branch5']:
77 >>> for i in [b'None', b'', None, b'default', b'branch5']:
76 ... mapbranch(i, bmap)
78 ... mapbranch(i, bmap)
77 'branch3'
79 'branch3'
78 'branch4'
80 'branch4'
79 'branch4'
81 'branch4'
80 'branch4'
82 'branch4'
81 'branch5'
83 'branch5'
82 '''
84 '''
83 # If branch is None or empty, this commit is coming from the source
85 # If branch is None or empty, this commit is coming from the source
84 # repository's default branch and destined for the default branch in the
86 # repository's default branch and destined for the default branch in the
85 # destination repository. For such commits, using a literal "default"
87 # destination repository. For such commits, using a literal "default"
86 # in branchmap below allows the user to map "default" to an alternate
88 # in branchmap below allows the user to map "default" to an alternate
87 # default branch in the destination repository.
89 # default branch in the destination repository.
88 branch = branchmap.get(branch or 'default', branch)
90 branch = branchmap.get(branch or 'default', branch)
89 # At some point we used "None" literal to denote the default branch,
91 # At some point we used "None" literal to denote the default branch,
90 # attempt to use that for backward compatibility.
92 # attempt to use that for backward compatibility.
91 if (not branch):
93 if (not branch):
92 branch = branchmap.get('None', branch)
94 branch = branchmap.get('None', branch)
93 return branch
95 return branch
94
96
95 source_converters = [
97 source_converters = [
96 ('cvs', convert_cvs, 'branchsort'),
98 ('cvs', convert_cvs, 'branchsort'),
97 ('git', convert_git, 'branchsort'),
99 ('git', convert_git, 'branchsort'),
98 ('svn', svn_source, 'branchsort'),
100 ('svn', svn_source, 'branchsort'),
99 ('hg', mercurial_source, 'sourcesort'),
101 ('hg', mercurial_source, 'sourcesort'),
100 ('darcs', darcs_source, 'branchsort'),
102 ('darcs', darcs_source, 'branchsort'),
101 ('mtn', monotone_source, 'branchsort'),
103 ('mtn', monotone_source, 'branchsort'),
102 ('gnuarch', gnuarch_source, 'branchsort'),
104 ('gnuarch', gnuarch_source, 'branchsort'),
103 ('bzr', bzr_source, 'branchsort'),
105 ('bzr', bzr_source, 'branchsort'),
104 ('p4', p4_source, 'branchsort'),
106 ('p4', p4_source, 'branchsort'),
105 ]
107 ]
106
108
107 sink_converters = [
109 sink_converters = [
108 ('hg', mercurial_sink),
110 ('hg', mercurial_sink),
109 ('svn', svn_sink),
111 ('svn', svn_sink),
110 ]
112 ]
111
113
112 def convertsource(ui, path, type, revs):
114 def convertsource(ui, path, type, revs):
113 exceptions = []
115 exceptions = []
114 if type and type not in [s[0] for s in source_converters]:
116 if type and type not in [s[0] for s in source_converters]:
115 raise error.Abort(_('%s: invalid source repository type') % type)
117 raise error.Abort(_('%s: invalid source repository type') % type)
116 for name, source, sortmode in source_converters:
118 for name, source, sortmode in source_converters:
117 try:
119 try:
118 if not type or name == type:
120 if not type or name == type:
119 return source(ui, name, path, revs), sortmode
121 return source(ui, name, path, revs), sortmode
120 except (NoRepo, MissingTool) as inst:
122 except (NoRepo, MissingTool) as inst:
121 exceptions.append(inst)
123 exceptions.append(inst)
122 if not ui.quiet:
124 if not ui.quiet:
123 for inst in exceptions:
125 for inst in exceptions:
124 ui.write("%s\n" % inst)
126 ui.write("%s\n" % inst)
125 raise error.Abort(_('%s: missing or unsupported repository') % path)
127 raise error.Abort(_('%s: missing or unsupported repository') % path)
126
128
127 def convertsink(ui, path, type):
129 def convertsink(ui, path, type):
128 if type and type not in [s[0] for s in sink_converters]:
130 if type and type not in [s[0] for s in sink_converters]:
129 raise error.Abort(_('%s: invalid destination repository type') % type)
131 raise error.Abort(_('%s: invalid destination repository type') % type)
130 for name, sink in sink_converters:
132 for name, sink in sink_converters:
131 try:
133 try:
132 if not type or name == type:
134 if not type or name == type:
133 return sink(ui, name, path)
135 return sink(ui, name, path)
134 except NoRepo as inst:
136 except NoRepo as inst:
135 ui.note(_("convert: %s\n") % inst)
137 ui.note(_("convert: %s\n") % inst)
136 except MissingTool as inst:
138 except MissingTool as inst:
137 raise error.Abort('%s\n' % inst)
139 raise error.Abort('%s\n' % inst)
138 raise error.Abort(_('%s: unknown repository type') % path)
140 raise error.Abort(_('%s: unknown repository type') % path)
139
141
140 class progresssource(object):
142 class progresssource(object):
141 def __init__(self, ui, source, filecount):
143 def __init__(self, ui, source, filecount):
142 self.ui = ui
144 self.ui = ui
143 self.source = source
145 self.source = source
144 self.filecount = filecount
146 self.filecount = filecount
145 self.retrieved = 0
147 self.retrieved = 0
146
148
147 def getfile(self, file, rev):
149 def getfile(self, file, rev):
148 self.retrieved += 1
150 self.retrieved += 1
149 self.ui.progress(_('getting files'), self.retrieved,
151 self.ui.progress(_('getting files'), self.retrieved,
150 item=file, total=self.filecount, unit=_('files'))
152 item=file, total=self.filecount, unit=_('files'))
151 return self.source.getfile(file, rev)
153 return self.source.getfile(file, rev)
152
154
153 def targetfilebelongstosource(self, targetfilename):
155 def targetfilebelongstosource(self, targetfilename):
154 return self.source.targetfilebelongstosource(targetfilename)
156 return self.source.targetfilebelongstosource(targetfilename)
155
157
156 def lookuprev(self, rev):
158 def lookuprev(self, rev):
157 return self.source.lookuprev(rev)
159 return self.source.lookuprev(rev)
158
160
159 def close(self):
161 def close(self):
160 self.ui.progress(_('getting files'), None)
162 self.ui.progress(_('getting files'), None)
161
163
162 class converter(object):
164 class converter(object):
163 def __init__(self, ui, source, dest, revmapfile, opts):
165 def __init__(self, ui, source, dest, revmapfile, opts):
164
166
165 self.source = source
167 self.source = source
166 self.dest = dest
168 self.dest = dest
167 self.ui = ui
169 self.ui = ui
168 self.opts = opts
170 self.opts = opts
169 self.commitcache = {}
171 self.commitcache = {}
170 self.authors = {}
172 self.authors = {}
171 self.authorfile = None
173 self.authorfile = None
172
174
173 # Record converted revisions persistently: maps source revision
175 # Record converted revisions persistently: maps source revision
174 # ID to target revision ID (both strings). (This is how
176 # ID to target revision ID (both strings). (This is how
175 # incremental conversions work.)
177 # incremental conversions work.)
176 self.map = mapfile(ui, revmapfile)
178 self.map = mapfile(ui, revmapfile)
177
179
178 # Read first the dst author map if any
180 # Read first the dst author map if any
179 authorfile = self.dest.authorfile()
181 authorfile = self.dest.authorfile()
180 if authorfile and os.path.exists(authorfile):
182 if authorfile and os.path.exists(authorfile):
181 self.readauthormap(authorfile)
183 self.readauthormap(authorfile)
182 # Extend/Override with new author map if necessary
184 # Extend/Override with new author map if necessary
183 if opts.get('authormap'):
185 if opts.get('authormap'):
184 self.readauthormap(opts.get('authormap'))
186 self.readauthormap(opts.get('authormap'))
185 self.authorfile = self.dest.authorfile()
187 self.authorfile = self.dest.authorfile()
186
188
187 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
189 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
188 self.branchmap = mapfile(ui, opts.get('branchmap'))
190 self.branchmap = mapfile(ui, opts.get('branchmap'))
189
191
190 def parsesplicemap(self, path):
192 def parsesplicemap(self, path):
191 """ check and validate the splicemap format and
193 """ check and validate the splicemap format and
192 return a child/parents dictionary.
194 return a child/parents dictionary.
193 Format checking has two parts.
195 Format checking has two parts.
194 1. generic format which is same across all source types
196 1. generic format which is same across all source types
195 2. specific format checking which may be different for
197 2. specific format checking which may be different for
196 different source type. This logic is implemented in
198 different source type. This logic is implemented in
197 checkrevformat function in source files like
199 checkrevformat function in source files like
198 hg.py, subversion.py etc.
200 hg.py, subversion.py etc.
199 """
201 """
200
202
201 if not path:
203 if not path:
202 return {}
204 return {}
203 m = {}
205 m = {}
204 try:
206 try:
205 fp = open(path, 'rb')
207 fp = open(path, 'rb')
206 for i, line in enumerate(util.iterfile(fp)):
208 for i, line in enumerate(util.iterfile(fp)):
207 line = line.splitlines()[0].rstrip()
209 line = line.splitlines()[0].rstrip()
208 if not line:
210 if not line:
209 # Ignore blank lines
211 # Ignore blank lines
210 continue
212 continue
211 # split line
213 # split line
212 lex = shlex.shlex(line, posix=True)
214 lex = shlex.shlex(line, posix=True)
213 lex.whitespace_split = True
215 lex.whitespace_split = True
214 lex.whitespace += ','
216 lex.whitespace += ','
215 line = list(lex)
217 line = list(lex)
216 # check number of parents
218 # check number of parents
217 if not (2 <= len(line) <= 3):
219 if not (2 <= len(line) <= 3):
218 raise error.Abort(_('syntax error in %s(%d): child parent1'
220 raise error.Abort(_('syntax error in %s(%d): child parent1'
219 '[,parent2] expected') % (path, i + 1))
221 '[,parent2] expected') % (path, i + 1))
220 for part in line:
222 for part in line:
221 self.source.checkrevformat(part)
223 self.source.checkrevformat(part)
222 child, p1, p2 = line[0], line[1:2], line[2:]
224 child, p1, p2 = line[0], line[1:2], line[2:]
223 if p1 == p2:
225 if p1 == p2:
224 m[child] = p1
226 m[child] = p1
225 else:
227 else:
226 m[child] = p1 + p2
228 m[child] = p1 + p2
227 # if file does not exist or error reading, exit
229 # if file does not exist or error reading, exit
228 except IOError:
230 except IOError:
229 raise error.Abort(_('splicemap file not found or error reading %s:')
231 raise error.Abort(_('splicemap file not found or error reading %s:')
230 % path)
232 % path)
231 return m
233 return m
232
234
233
235
234 def walktree(self, heads):
236 def walktree(self, heads):
235 '''Return a mapping that identifies the uncommitted parents of every
237 '''Return a mapping that identifies the uncommitted parents of every
236 uncommitted changeset.'''
238 uncommitted changeset.'''
237 visit = heads
239 visit = heads
238 known = set()
240 known = set()
239 parents = {}
241 parents = {}
240 numcommits = self.source.numcommits()
242 numcommits = self.source.numcommits()
241 while visit:
243 while visit:
242 n = visit.pop(0)
244 n = visit.pop(0)
243 if n in known:
245 if n in known:
244 continue
246 continue
245 if n in self.map:
247 if n in self.map:
246 m = self.map[n]
248 m = self.map[n]
247 if m == SKIPREV or self.dest.hascommitfrommap(m):
249 if m == SKIPREV or self.dest.hascommitfrommap(m):
248 continue
250 continue
249 known.add(n)
251 known.add(n)
250 self.ui.progress(_('scanning'), len(known), unit=_('revisions'),
252 self.ui.progress(_('scanning'), len(known), unit=_('revisions'),
251 total=numcommits)
253 total=numcommits)
252 commit = self.cachecommit(n)
254 commit = self.cachecommit(n)
253 parents[n] = []
255 parents[n] = []
254 for p in commit.parents:
256 for p in commit.parents:
255 parents[n].append(p)
257 parents[n].append(p)
256 visit.append(p)
258 visit.append(p)
257 self.ui.progress(_('scanning'), None)
259 self.ui.progress(_('scanning'), None)
258
260
259 return parents
261 return parents
260
262
261 def mergesplicemap(self, parents, splicemap):
263 def mergesplicemap(self, parents, splicemap):
262 """A splicemap redefines child/parent relationships. Check the
264 """A splicemap redefines child/parent relationships. Check the
263 map contains valid revision identifiers and merge the new
265 map contains valid revision identifiers and merge the new
264 links in the source graph.
266 links in the source graph.
265 """
267 """
266 for c in sorted(splicemap):
268 for c in sorted(splicemap):
267 if c not in parents:
269 if c not in parents:
268 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
270 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
269 # Could be in source but not converted during this run
271 # Could be in source but not converted during this run
270 self.ui.warn(_('splice map revision %s is not being '
272 self.ui.warn(_('splice map revision %s is not being '
271 'converted, ignoring\n') % c)
273 'converted, ignoring\n') % c)
272 continue
274 continue
273 pc = []
275 pc = []
274 for p in splicemap[c]:
276 for p in splicemap[c]:
275 # We do not have to wait for nodes already in dest.
277 # We do not have to wait for nodes already in dest.
276 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
278 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
277 continue
279 continue
278 # Parent is not in dest and not being converted, not good
280 # Parent is not in dest and not being converted, not good
279 if p not in parents:
281 if p not in parents:
280 raise error.Abort(_('unknown splice map parent: %s') % p)
282 raise error.Abort(_('unknown splice map parent: %s') % p)
281 pc.append(p)
283 pc.append(p)
282 parents[c] = pc
284 parents[c] = pc
283
285
284 def toposort(self, parents, sortmode):
286 def toposort(self, parents, sortmode):
285 '''Return an ordering such that every uncommitted changeset is
287 '''Return an ordering such that every uncommitted changeset is
286 preceded by all its uncommitted ancestors.'''
288 preceded by all its uncommitted ancestors.'''
287
289
288 def mapchildren(parents):
290 def mapchildren(parents):
289 """Return a (children, roots) tuple where 'children' maps parent
291 """Return a (children, roots) tuple where 'children' maps parent
290 revision identifiers to children ones, and 'roots' is the list of
292 revision identifiers to children ones, and 'roots' is the list of
291 revisions without parents. 'parents' must be a mapping of revision
293 revisions without parents. 'parents' must be a mapping of revision
292 identifier to its parents ones.
294 identifier to its parents ones.
293 """
295 """
294 visit = collections.deque(sorted(parents))
296 visit = collections.deque(sorted(parents))
295 seen = set()
297 seen = set()
296 children = {}
298 children = {}
297 roots = []
299 roots = []
298
300
299 while visit:
301 while visit:
300 n = visit.popleft()
302 n = visit.popleft()
301 if n in seen:
303 if n in seen:
302 continue
304 continue
303 seen.add(n)
305 seen.add(n)
304 # Ensure that nodes without parents are present in the
306 # Ensure that nodes without parents are present in the
305 # 'children' mapping.
307 # 'children' mapping.
306 children.setdefault(n, [])
308 children.setdefault(n, [])
307 hasparent = False
309 hasparent = False
308 for p in parents[n]:
310 for p in parents[n]:
309 if p not in self.map:
311 if p not in self.map:
310 visit.append(p)
312 visit.append(p)
311 hasparent = True
313 hasparent = True
312 children.setdefault(p, []).append(n)
314 children.setdefault(p, []).append(n)
313 if not hasparent:
315 if not hasparent:
314 roots.append(n)
316 roots.append(n)
315
317
316 return children, roots
318 return children, roots
317
319
318 # Sort functions are supposed to take a list of revisions which
320 # Sort functions are supposed to take a list of revisions which
319 # can be converted immediately and pick one
321 # can be converted immediately and pick one
320
322
321 def makebranchsorter():
323 def makebranchsorter():
322 """If the previously converted revision has a child in the
324 """If the previously converted revision has a child in the
323 eligible revisions list, pick it. Return the list head
325 eligible revisions list, pick it. Return the list head
324 otherwise. Branch sort attempts to minimize branch
326 otherwise. Branch sort attempts to minimize branch
325 switching, which is harmful for Mercurial backend
327 switching, which is harmful for Mercurial backend
326 compression.
328 compression.
327 """
329 """
328 prev = [None]
330 prev = [None]
329 def picknext(nodes):
331 def picknext(nodes):
330 next = nodes[0]
332 next = nodes[0]
331 for n in nodes:
333 for n in nodes:
332 if prev[0] in parents[n]:
334 if prev[0] in parents[n]:
333 next = n
335 next = n
334 break
336 break
335 prev[0] = next
337 prev[0] = next
336 return next
338 return next
337 return picknext
339 return picknext
338
340
339 def makesourcesorter():
341 def makesourcesorter():
340 """Source specific sort."""
342 """Source specific sort."""
341 keyfn = lambda n: self.commitcache[n].sortkey
343 keyfn = lambda n: self.commitcache[n].sortkey
342 def picknext(nodes):
344 def picknext(nodes):
343 return sorted(nodes, key=keyfn)[0]
345 return sorted(nodes, key=keyfn)[0]
344 return picknext
346 return picknext
345
347
346 def makeclosesorter():
348 def makeclosesorter():
347 """Close order sort."""
349 """Close order sort."""
348 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
350 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
349 self.commitcache[n].sortkey)
351 self.commitcache[n].sortkey)
350 def picknext(nodes):
352 def picknext(nodes):
351 return sorted(nodes, key=keyfn)[0]
353 return sorted(nodes, key=keyfn)[0]
352 return picknext
354 return picknext
353
355
354 def makedatesorter():
356 def makedatesorter():
355 """Sort revisions by date."""
357 """Sort revisions by date."""
356 dates = {}
358 dates = {}
357 def getdate(n):
359 def getdate(n):
358 if n not in dates:
360 if n not in dates:
359 dates[n] = util.parsedate(self.commitcache[n].date)
361 dates[n] = util.parsedate(self.commitcache[n].date)
360 return dates[n]
362 return dates[n]
361
363
362 def picknext(nodes):
364 def picknext(nodes):
363 return min([(getdate(n), n) for n in nodes])[1]
365 return min([(getdate(n), n) for n in nodes])[1]
364
366
365 return picknext
367 return picknext
366
368
367 if sortmode == 'branchsort':
369 if sortmode == 'branchsort':
368 picknext = makebranchsorter()
370 picknext = makebranchsorter()
369 elif sortmode == 'datesort':
371 elif sortmode == 'datesort':
370 picknext = makedatesorter()
372 picknext = makedatesorter()
371 elif sortmode == 'sourcesort':
373 elif sortmode == 'sourcesort':
372 picknext = makesourcesorter()
374 picknext = makesourcesorter()
373 elif sortmode == 'closesort':
375 elif sortmode == 'closesort':
374 picknext = makeclosesorter()
376 picknext = makeclosesorter()
375 else:
377 else:
376 raise error.Abort(_('unknown sort mode: %s') % sortmode)
378 raise error.Abort(_('unknown sort mode: %s') % sortmode)
377
379
378 children, actives = mapchildren(parents)
380 children, actives = mapchildren(parents)
379
381
380 s = []
382 s = []
381 pendings = {}
383 pendings = {}
382 while actives:
384 while actives:
383 n = picknext(actives)
385 n = picknext(actives)
384 actives.remove(n)
386 actives.remove(n)
385 s.append(n)
387 s.append(n)
386
388
387 # Update dependents list
389 # Update dependents list
388 for c in children.get(n, []):
390 for c in children.get(n, []):
389 if c not in pendings:
391 if c not in pendings:
390 pendings[c] = [p for p in parents[c] if p not in self.map]
392 pendings[c] = [p for p in parents[c] if p not in self.map]
391 try:
393 try:
392 pendings[c].remove(n)
394 pendings[c].remove(n)
393 except ValueError:
395 except ValueError:
394 raise error.Abort(_('cycle detected between %s and %s')
396 raise error.Abort(_('cycle detected between %s and %s')
395 % (recode(c), recode(n)))
397 % (recode(c), recode(n)))
396 if not pendings[c]:
398 if not pendings[c]:
397 # Parents are converted, node is eligible
399 # Parents are converted, node is eligible
398 actives.insert(0, c)
400 actives.insert(0, c)
399 pendings[c] = None
401 pendings[c] = None
400
402
401 if len(s) != len(parents):
403 if len(s) != len(parents):
402 raise error.Abort(_("not all revisions were sorted"))
404 raise error.Abort(_("not all revisions were sorted"))
403
405
404 return s
406 return s
405
407
406 def writeauthormap(self):
408 def writeauthormap(self):
407 authorfile = self.authorfile
409 authorfile = self.authorfile
408 if authorfile:
410 if authorfile:
409 self.ui.status(_('writing author map file %s\n') % authorfile)
411 self.ui.status(_('writing author map file %s\n') % authorfile)
410 ofile = open(authorfile, 'wb+')
412 ofile = open(authorfile, 'wb+')
411 for author in self.authors:
413 for author in self.authors:
412 ofile.write("%s=%s\n" % (author, self.authors[author]))
414 ofile.write("%s=%s\n" % (author, self.authors[author]))
413 ofile.close()
415 ofile.close()
414
416
415 def readauthormap(self, authorfile):
417 def readauthormap(self, authorfile):
416 afile = open(authorfile, 'rb')
418 afile = open(authorfile, 'rb')
417 for line in afile:
419 for line in afile:
418
420
419 line = line.strip()
421 line = line.strip()
420 if not line or line.startswith('#'):
422 if not line or line.startswith('#'):
421 continue
423 continue
422
424
423 try:
425 try:
424 srcauthor, dstauthor = line.split('=', 1)
426 srcauthor, dstauthor = line.split('=', 1)
425 except ValueError:
427 except ValueError:
426 msg = _('ignoring bad line in author map file %s: %s\n')
428 msg = _('ignoring bad line in author map file %s: %s\n')
427 self.ui.warn(msg % (authorfile, line.rstrip()))
429 self.ui.warn(msg % (authorfile, line.rstrip()))
428 continue
430 continue
429
431
430 srcauthor = srcauthor.strip()
432 srcauthor = srcauthor.strip()
431 dstauthor = dstauthor.strip()
433 dstauthor = dstauthor.strip()
432 if self.authors.get(srcauthor) in (None, dstauthor):
434 if self.authors.get(srcauthor) in (None, dstauthor):
433 msg = _('mapping author %s to %s\n')
435 msg = _('mapping author %s to %s\n')
434 self.ui.debug(msg % (srcauthor, dstauthor))
436 self.ui.debug(msg % (srcauthor, dstauthor))
435 self.authors[srcauthor] = dstauthor
437 self.authors[srcauthor] = dstauthor
436 continue
438 continue
437
439
438 m = _('overriding mapping for author %s, was %s, will be %s\n')
440 m = _('overriding mapping for author %s, was %s, will be %s\n')
439 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
441 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
440
442
441 afile.close()
443 afile.close()
442
444
443 def cachecommit(self, rev):
445 def cachecommit(self, rev):
444 commit = self.source.getcommit(rev)
446 commit = self.source.getcommit(rev)
445 commit.author = self.authors.get(commit.author, commit.author)
447 commit.author = self.authors.get(commit.author, commit.author)
446 commit.branch = mapbranch(commit.branch, self.branchmap)
448 commit.branch = mapbranch(commit.branch, self.branchmap)
447 self.commitcache[rev] = commit
449 self.commitcache[rev] = commit
448 return commit
450 return commit
449
451
450 def copy(self, rev):
452 def copy(self, rev):
451 commit = self.commitcache[rev]
453 commit = self.commitcache[rev]
452 full = self.opts.get('full')
454 full = self.opts.get('full')
453 changes = self.source.getchanges(rev, full)
455 changes = self.source.getchanges(rev, full)
454 if isinstance(changes, bytes):
456 if isinstance(changes, bytes):
455 if changes == SKIPREV:
457 if changes == SKIPREV:
456 dest = SKIPREV
458 dest = SKIPREV
457 else:
459 else:
458 dest = self.map[changes]
460 dest = self.map[changes]
459 self.map[rev] = dest
461 self.map[rev] = dest
460 return
462 return
461 files, copies, cleanp2 = changes
463 files, copies, cleanp2 = changes
462 pbranches = []
464 pbranches = []
463 if commit.parents:
465 if commit.parents:
464 for prev in commit.parents:
466 for prev in commit.parents:
465 if prev not in self.commitcache:
467 if prev not in self.commitcache:
466 self.cachecommit(prev)
468 self.cachecommit(prev)
467 pbranches.append((self.map[prev],
469 pbranches.append((self.map[prev],
468 self.commitcache[prev].branch))
470 self.commitcache[prev].branch))
469 self.dest.setbranch(commit.branch, pbranches)
471 self.dest.setbranch(commit.branch, pbranches)
470 try:
472 try:
471 parents = self.splicemap[rev]
473 parents = self.splicemap[rev]
472 self.ui.status(_('spliced in %s as parents of %s\n') %
474 self.ui.status(_('spliced in %s as parents of %s\n') %
473 (_(' and ').join(parents), rev))
475 (_(' and ').join(parents), rev))
474 parents = [self.map.get(p, p) for p in parents]
476 parents = [self.map.get(p, p) for p in parents]
475 except KeyError:
477 except KeyError:
476 parents = [b[0] for b in pbranches]
478 parents = [b[0] for b in pbranches]
477 parents.extend(self.map[x]
479 parents.extend(self.map[x]
478 for x in commit.optparents
480 for x in commit.optparents
479 if x in self.map)
481 if x in self.map)
480 if len(pbranches) != 2:
482 if len(pbranches) != 2:
481 cleanp2 = set()
483 cleanp2 = set()
482 if len(parents) < 3:
484 if len(parents) < 3:
483 source = progresssource(self.ui, self.source, len(files))
485 source = progresssource(self.ui, self.source, len(files))
484 else:
486 else:
485 # For an octopus merge, we end up traversing the list of
487 # For an octopus merge, we end up traversing the list of
486 # changed files N-1 times. This tweak to the number of
488 # changed files N-1 times. This tweak to the number of
487 # files makes it so the progress bar doesn't overflow
489 # files makes it so the progress bar doesn't overflow
488 # itself.
490 # itself.
489 source = progresssource(self.ui, self.source,
491 source = progresssource(self.ui, self.source,
490 len(files) * (len(parents) - 1))
492 len(files) * (len(parents) - 1))
491 newnode = self.dest.putcommit(files, copies, parents, commit,
493 newnode = self.dest.putcommit(files, copies, parents, commit,
492 source, self.map, full, cleanp2)
494 source, self.map, full, cleanp2)
493 source.close()
495 source.close()
494 self.source.converted(rev, newnode)
496 self.source.converted(rev, newnode)
495 self.map[rev] = newnode
497 self.map[rev] = newnode
496
498
497 def convert(self, sortmode):
499 def convert(self, sortmode):
498 try:
500 try:
499 self.source.before()
501 self.source.before()
500 self.dest.before()
502 self.dest.before()
501 self.source.setrevmap(self.map)
503 self.source.setrevmap(self.map)
502 self.ui.status(_("scanning source...\n"))
504 self.ui.status(_("scanning source...\n"))
503 heads = self.source.getheads()
505 heads = self.source.getheads()
504 parents = self.walktree(heads)
506 parents = self.walktree(heads)
505 self.mergesplicemap(parents, self.splicemap)
507 self.mergesplicemap(parents, self.splicemap)
506 self.ui.status(_("sorting...\n"))
508 self.ui.status(_("sorting...\n"))
507 t = self.toposort(parents, sortmode)
509 t = self.toposort(parents, sortmode)
508 num = len(t)
510 num = len(t)
509 c = None
511 c = None
510
512
511 self.ui.status(_("converting...\n"))
513 self.ui.status(_("converting...\n"))
512 for i, c in enumerate(t):
514 for i, c in enumerate(t):
513 num -= 1
515 num -= 1
514 desc = self.commitcache[c].desc
516 desc = self.commitcache[c].desc
515 if "\n" in desc:
517 if "\n" in desc:
516 desc = desc.splitlines()[0]
518 desc = desc.splitlines()[0]
517 # convert log message to local encoding without using
519 # convert log message to local encoding without using
518 # tolocal() because the encoding.encoding convert()
520 # tolocal() because the encoding.encoding convert()
519 # uses is 'utf-8'
521 # uses is 'utf-8'
520 self.ui.status("%d %s\n" % (num, recode(desc)))
522 self.ui.status("%d %s\n" % (num, recode(desc)))
521 self.ui.note(_("source: %s\n") % recode(c))
523 self.ui.note(_("source: %s\n") % recode(c))
522 self.ui.progress(_('converting'), i, unit=_('revisions'),
524 self.ui.progress(_('converting'), i, unit=_('revisions'),
523 total=len(t))
525 total=len(t))
524 self.copy(c)
526 self.copy(c)
525 self.ui.progress(_('converting'), None)
527 self.ui.progress(_('converting'), None)
526
528
527 if not self.ui.configbool('convert', 'skiptags'):
529 if not self.ui.configbool('convert', 'skiptags'):
528 tags = self.source.gettags()
530 tags = self.source.gettags()
529 ctags = {}
531 ctags = {}
530 for k in tags:
532 for k in tags:
531 v = tags[k]
533 v = tags[k]
532 if self.map.get(v, SKIPREV) != SKIPREV:
534 if self.map.get(v, SKIPREV) != SKIPREV:
533 ctags[k] = self.map[v]
535 ctags[k] = self.map[v]
534
536
535 if c and ctags:
537 if c and ctags:
536 nrev, tagsparent = self.dest.puttags(ctags)
538 nrev, tagsparent = self.dest.puttags(ctags)
537 if nrev and tagsparent:
539 if nrev and tagsparent:
538 # write another hash correspondence to override the
540 # write another hash correspondence to override the
539 # previous one so we don't end up with extra tag heads
541 # previous one so we don't end up with extra tag heads
540 tagsparents = [e for e in self.map.iteritems()
542 tagsparents = [e for e in self.map.iteritems()
541 if e[1] == tagsparent]
543 if e[1] == tagsparent]
542 if tagsparents:
544 if tagsparents:
543 self.map[tagsparents[0][0]] = nrev
545 self.map[tagsparents[0][0]] = nrev
544
546
545 bookmarks = self.source.getbookmarks()
547 bookmarks = self.source.getbookmarks()
546 cbookmarks = {}
548 cbookmarks = {}
547 for k in bookmarks:
549 for k in bookmarks:
548 v = bookmarks[k]
550 v = bookmarks[k]
549 if self.map.get(v, SKIPREV) != SKIPREV:
551 if self.map.get(v, SKIPREV) != SKIPREV:
550 cbookmarks[k] = self.map[v]
552 cbookmarks[k] = self.map[v]
551
553
552 if c and cbookmarks:
554 if c and cbookmarks:
553 self.dest.putbookmarks(cbookmarks)
555 self.dest.putbookmarks(cbookmarks)
554
556
555 self.writeauthormap()
557 self.writeauthormap()
556 finally:
558 finally:
557 self.cleanup()
559 self.cleanup()
558
560
559 def cleanup(self):
561 def cleanup(self):
560 try:
562 try:
561 self.dest.after()
563 self.dest.after()
562 finally:
564 finally:
563 self.source.after()
565 self.source.after()
564 self.map.close()
566 self.map.close()
565
567
566 def convert(ui, src, dest=None, revmapfile=None, **opts):
568 def convert(ui, src, dest=None, revmapfile=None, **opts):
567 global orig_encoding
569 global orig_encoding
568 orig_encoding = encoding.encoding
570 orig_encoding = encoding.encoding
569 encoding.encoding = 'UTF-8'
571 encoding.encoding = 'UTF-8'
570
572
571 # support --authors as an alias for --authormap
573 # support --authors as an alias for --authormap
572 if not opts.get('authormap'):
574 if not opts.get('authormap'):
573 opts['authormap'] = opts.get('authors')
575 opts['authormap'] = opts.get('authors')
574
576
575 if not dest:
577 if not dest:
576 dest = hg.defaultdest(src) + "-hg"
578 dest = hg.defaultdest(src) + "-hg"
577 ui.status(_("assuming destination %s\n") % dest)
579 ui.status(_("assuming destination %s\n") % dest)
578
580
579 destc = convertsink(ui, dest, opts.get('dest_type'))
581 destc = convertsink(ui, dest, opts.get('dest_type'))
580 destc = scmutil.wrapconvertsink(destc)
582 destc = scmutil.wrapconvertsink(destc)
581
583
582 try:
584 try:
583 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
585 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
584 opts.get('rev'))
586 opts.get('rev'))
585 except Exception:
587 except Exception:
586 for path in destc.created:
588 for path in destc.created:
587 shutil.rmtree(path, True)
589 shutil.rmtree(path, True)
588 raise
590 raise
589
591
590 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
592 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
591 sortmode = [m for m in sortmodes if opts.get(m)]
593 sortmode = [m for m in sortmodes if opts.get(m)]
592 if len(sortmode) > 1:
594 if len(sortmode) > 1:
593 raise error.Abort(_('more than one sort mode specified'))
595 raise error.Abort(_('more than one sort mode specified'))
594 if sortmode:
596 if sortmode:
595 sortmode = sortmode[0]
597 sortmode = sortmode[0]
596 else:
598 else:
597 sortmode = defaultsort
599 sortmode = defaultsort
598
600
599 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
601 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
600 raise error.Abort(_('--sourcesort is not supported by this data source')
602 raise error.Abort(_('--sourcesort is not supported by this data source')
601 )
603 )
602 if sortmode == 'closesort' and not srcc.hasnativeclose():
604 if sortmode == 'closesort' and not srcc.hasnativeclose():
603 raise error.Abort(_('--closesort is not supported by this data source'))
605 raise error.Abort(_('--closesort is not supported by this data source'))
604
606
605 fmap = opts.get('filemap')
607 fmap = opts.get('filemap')
606 if fmap:
608 if fmap:
607 srcc = filemap.filemap_source(ui, srcc, fmap)
609 srcc = filemap.filemap_source(ui, srcc, fmap)
608 destc.setfilemapmode(True)
610 destc.setfilemapmode(True)
609
611
610 if not revmapfile:
612 if not revmapfile:
611 revmapfile = destc.revmapfile()
613 revmapfile = destc.revmapfile()
612
614
613 c = converter(ui, srcc, destc, revmapfile, opts)
615 c = converter(ui, srcc, destc, revmapfile, opts)
614 c.convert(sortmode)
616 c.convert(sortmode)
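A closing note on why recode() is needed at all: convert(), at the end of the listing, saves the user's locale in the module-level orig_encoding and then forces encoding.encoding to 'UTF-8', so commit metadata is handled internally as UTF-8; recode() only converts those UTF-8 strings back to the user's locale for messages printed during conversion. A rough standalone illustration of that round trip, using a hypothetical 'ascii' locale:

    # Internally, convert handles commit metadata as UTF-8 bytes.
    desc = u'r\xe9sum\xe9 of changes'.encode('utf-8')

    # recode()'s else-branch: back to the user's locale for display,
    # replacing whatever the locale cannot represent.
    orig_encoding = 'ascii'    # hypothetical user locale
    print(desc.decode('utf-8').encode(orig_encoding, 'replace'))
    # -> b'r?sum? of changes'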