##// END OF EJS Templates
convert: refactor authormap into separate function for outside use...
Joerg Sonnenberger -
r44561:fdaa4233 default
parent child Browse files
Show More
@@ -1,665 +1,670 b''
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7 from __future__ import absolute_import
7 from __future__ import absolute_import
8
8
9 import collections
9 import collections
10 import os
10 import os
11 import shutil
11 import shutil
12
12
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14 from mercurial.pycompat import open
14 from mercurial.pycompat import open
15 from mercurial import (
15 from mercurial import (
16 encoding,
16 encoding,
17 error,
17 error,
18 hg,
18 hg,
19 pycompat,
19 pycompat,
20 scmutil,
20 scmutil,
21 util,
21 util,
22 )
22 )
23 from mercurial.utils import dateutil
23 from mercurial.utils import dateutil
24
24
25 from . import (
25 from . import (
26 bzr,
26 bzr,
27 common,
27 common,
28 cvs,
28 cvs,
29 darcs,
29 darcs,
30 filemap,
30 filemap,
31 git,
31 git,
32 gnuarch,
32 gnuarch,
33 hg as hgconvert,
33 hg as hgconvert,
34 monotone,
34 monotone,
35 p4,
35 p4,
36 subversion,
36 subversion,
37 )
37 )
38
38
# Re-export common helpers under their historical module-level names.
mapfile = common.mapfile
MissingTool = common.MissingTool
NoRepo = common.NoRepo
SKIPREV = common.SKIPREV

# Converter implementations, aliased for the registration tables below
# (source_converters / sink_converters).
bzr_source = bzr.bzr_source
convert_cvs = cvs.convert_cvs
convert_git = git.convert_git
darcs_source = darcs.darcs_source
gnuarch_source = gnuarch.gnuarch_source
mercurial_sink = hgconvert.mercurial_sink
mercurial_source = hgconvert.mercurial_source
monotone_source = monotone.monotone_source
p4_source = p4.p4_source
svn_sink = subversion.svn_sink
svn_source = subversion.svn_source

# Target encoding used by recode() when re-encoding log messages.
# NOTE(review): presumably reassigned to the user's locale encoding by the
# convert entry point before conversion starts — confirm against caller.
orig_encoding = b'ascii'
57
57
58
58
def readauthormap(ui, authorfile, authors=None):
    """Parse an author map file into *authors* (srcauthor -> dstauthor).

    Blank lines and lines starting with b'#' are ignored.  Malformed
    lines (missing b'=') are reported via ui.warn and skipped.  A line
    that would change an existing, different mapping is reported via
    ui.status and NOT applied.  Returns the (possibly shared) mapping.
    """
    authors = {} if authors is None else authors
    with open(authorfile, b'rb') as fp:
        for raw in fp:
            entry = raw.strip()
            # Skip blanks and comment lines.
            if not entry or entry.startswith(b'#'):
                continue
            parts = entry.split(b'=', 1)
            if len(parts) != 2:
                msg = _(b'ignoring bad line in author map file %s: %s\n')
                ui.warn(msg % (authorfile, entry.rstrip()))
                continue
            srcauthor = parts[0].strip()
            dstauthor = parts[1].strip()
            known = authors.get(srcauthor)
            if known is not None and known != dstauthor:
                # Existing conflicting entry wins; just report it.
                m = _(b'overriding mapping for author %s, was %s, will be %s\n')
                ui.status(m % (srcauthor, known, dstauthor))
            else:
                msg = _(b'mapping author %s to %s\n')
                ui.debug(msg % (srcauthor, dstauthor))
                authors[srcauthor] = dstauthor
    return authors
87
88
def recode(s):
    """Re-encode *s* into the original encoding, replacing what doesn't fit.

    Accepts either a unicode string or bytes assumed to hold UTF-8 text;
    returns bytes in ``orig_encoding``.
    """
    target = pycompat.sysstr(orig_encoding)
    if not isinstance(s, pycompat.unicode):
        # Bytes input: decode as UTF-8 first (strict, as before).
        s = s.decode('utf-8')
    return s.encode(target, 'replace')
66
96
67
97
def mapbranch(branch, branchmap):
    """Translate *branch* through the user-supplied *branchmap*.

    A None or empty branch denotes the source repository's default
    branch and is looked up under the literal key b'default', which
    lets users redirect "default" to another branch in the destination.
    If the result is still empty, the historical b'None' key is tried
    for backward compatibility.  Unmapped branches pass through
    unchanged.
    """
    lookup = branch if branch else b'default'
    mapped = branchmap.get(lookup, branch)
    if mapped:
        return mapped
    # Old branch maps used the literal string "None" for the default
    # branch; honor that spelling as a fallback.
    return branchmap.get(b'None', mapped)
100
130
101
131
# Registered source converters: (name, constructor, default sort mode).
# convertsource() probes these in order when no explicit type is given.
source_converters = [
    (b'cvs', convert_cvs, b'branchsort'),
    (b'git', convert_git, b'branchsort'),
    (b'svn', svn_source, b'branchsort'),
    (b'hg', mercurial_source, b'sourcesort'),
    (b'darcs', darcs_source, b'branchsort'),
    (b'mtn', monotone_source, b'branchsort'),
    (b'gnuarch', gnuarch_source, b'branchsort'),
    (b'bzr', bzr_source, b'branchsort'),
    (b'p4', p4_source, b'branchsort'),
]

# Registered destination sinks: (name, constructor).
sink_converters = [
    (b'hg', mercurial_sink),
    (b'svn', svn_sink),
]
118
148
119
149
def convertsource(ui, path, type, revs):
    """Instantiate a source converter for the repository at *path*.

    Returns a ``(source, sortmode)`` pair.  When *type* is given only
    that converter is tried, otherwise each registered converter is
    probed in turn.  Probe failures are reported (unless quiet) and the
    call aborts when no converter accepts the repository.
    """
    knowntypes = [s[0] for s in source_converters]
    if type and type not in knowntypes:
        raise error.Abort(_(b'%s: invalid source repository type') % type)
    probefailures = []
    for name, source, sortmode in source_converters:
        if type and name != type:
            continue
        try:
            return source(ui, name, path, revs), sortmode
        except (NoRepo, MissingTool) as inst:
            probefailures.append(inst)
    if not ui.quiet:
        for inst in probefailures:
            ui.write(b"%s\n" % pycompat.bytestr(inst.args[0]))
    raise error.Abort(_(b'%s: missing or unsupported repository') % path)
134
164
135
165
def convertsink(ui, path, type):
    """Instantiate a destination sink for the repository at *path*.

    When *type* is given only that sink is tried, otherwise every
    registered sink is probed.  A missing external tool aborts
    immediately; an unrecognized destination aborts after all probes.
    """
    knowntypes = [s[0] for s in sink_converters]
    if type and type not in knowntypes:
        raise error.Abort(_(b'%s: invalid destination repository type') % type)
    for name, sink in sink_converters:
        if type and name != type:
            continue
        try:
            return sink(ui, name, path)
        except NoRepo as inst:
            # Not this sink's kind of repository; keep probing.
            ui.note(_(b"convert: %s\n") % inst)
        except MissingTool as inst:
            raise error.Abort(b'%s\n' % inst)
    raise error.Abort(_(b'%s: unknown repository type') % path)
148
178
149
179
class progresssource(object):
    """Proxy around a converter source that drives a 'getting files'
    progress bar as files are retrieved."""

    def __init__(self, ui, source, filecount):
        self.ui = ui
        self.source = source
        # filecount is the expected total used to size the progress bar.
        self.progress = ui.makeprogress(
            _(b'getting files'), unit=_(b'files'), total=filecount
        )

    def getfile(self, file, rev):
        # Tick the bar before delegating so it reflects requests made.
        self.progress.increment(item=file)
        return self.source.getfile(file, rev)

    def targetfilebelongstosource(self, targetfilename):
        # Pure delegation to the wrapped source.
        return self.source.targetfilebelongstosource(targetfilename)

    def lookuprev(self, rev):
        # Pure delegation to the wrapped source.
        return self.source.lookuprev(rev)

    def close(self):
        # Finish the progress bar; the wrapped source is NOT closed here.
        self.progress.complete()
170
200
171
201
172 class converter(object):
202 class converter(object):
    def __init__(self, ui, source, dest, revmapfile, opts):
        """Set up a conversion from *source* to *dest*.

        revmapfile is the persistent source->destination revision map
        that makes incremental conversion possible.  opts carries the
        authormap/splicemap/branchmap command-line options.
        """
        self.source = source
        self.dest = dest
        self.ui = ui
        self.opts = opts
        self.commitcache = {}  # source rev id -> cached commit object
        self.authors = {}  # srcauthor -> dstauthor
        self.authorfile = None

        # Record converted revisions persistently: maps source revision
        # ID to target revision ID (both strings). (This is how
        # incremental conversions work.)
        self.map = mapfile(ui, revmapfile)

        # Read first the dst author map if any
        authorfile = self.dest.authorfile()
        if authorfile and os.path.exists(authorfile):
            self.readauthormap(authorfile)
        # Extend/Override with new author map if necessary
        if opts.get(b'authormap'):
            self.readauthormap(opts.get(b'authormap'))
            # Only remember the file for writeauthormap() when the user
            # supplied an authormap of their own.
            self.authorfile = self.dest.authorfile()

        self.splicemap = self.parsesplicemap(opts.get(b'splicemap'))
        self.branchmap = mapfile(ui, opts.get(b'branchmap'))
199
229
200 def parsesplicemap(self, path):
230 def parsesplicemap(self, path):
201 """ check and validate the splicemap format and
231 """ check and validate the splicemap format and
202 return a child/parents dictionary.
232 return a child/parents dictionary.
203 Format checking has two parts.
233 Format checking has two parts.
204 1. generic format which is same across all source types
234 1. generic format which is same across all source types
205 2. specific format checking which may be different for
235 2. specific format checking which may be different for
206 different source type. This logic is implemented in
236 different source type. This logic is implemented in
207 checkrevformat function in source files like
237 checkrevformat function in source files like
208 hg.py, subversion.py etc.
238 hg.py, subversion.py etc.
209 """
239 """
210
240
211 if not path:
241 if not path:
212 return {}
242 return {}
213 m = {}
243 m = {}
214 try:
244 try:
215 fp = open(path, b'rb')
245 fp = open(path, b'rb')
216 for i, line in enumerate(util.iterfile(fp)):
246 for i, line in enumerate(util.iterfile(fp)):
217 line = line.splitlines()[0].rstrip()
247 line = line.splitlines()[0].rstrip()
218 if not line:
248 if not line:
219 # Ignore blank lines
249 # Ignore blank lines
220 continue
250 continue
221 # split line
251 # split line
222 lex = common.shlexer(data=line, whitespace=b',')
252 lex = common.shlexer(data=line, whitespace=b',')
223 line = list(lex)
253 line = list(lex)
224 # check number of parents
254 # check number of parents
225 if not (2 <= len(line) <= 3):
255 if not (2 <= len(line) <= 3):
226 raise error.Abort(
256 raise error.Abort(
227 _(
257 _(
228 b'syntax error in %s(%d): child parent1'
258 b'syntax error in %s(%d): child parent1'
229 b'[,parent2] expected'
259 b'[,parent2] expected'
230 )
260 )
231 % (path, i + 1)
261 % (path, i + 1)
232 )
262 )
233 for part in line:
263 for part in line:
234 self.source.checkrevformat(part)
264 self.source.checkrevformat(part)
235 child, p1, p2 = line[0], line[1:2], line[2:]
265 child, p1, p2 = line[0], line[1:2], line[2:]
236 if p1 == p2:
266 if p1 == p2:
237 m[child] = p1
267 m[child] = p1
238 else:
268 else:
239 m[child] = p1 + p2
269 m[child] = p1 + p2
240 # if file does not exist or error reading, exit
270 # if file does not exist or error reading, exit
241 except IOError:
271 except IOError:
242 raise error.Abort(
272 raise error.Abort(
243 _(b'splicemap file not found or error reading %s:') % path
273 _(b'splicemap file not found or error reading %s:') % path
244 )
274 )
245 return m
275 return m
246
276
    def walktree(self, heads):
        '''Return a mapping that identifies the uncommitted parents of every
        uncommitted changeset.'''
        visit = list(heads)
        known = set()  # revisions already scanned this run
        parents = {}
        numcommits = self.source.numcommits()
        progress = self.ui.makeprogress(
            _(b'scanning'), unit=_(b'revisions'), total=numcommits
        )
        while visit:
            n = visit.pop(0)
            if n in known:
                continue
            if n in self.map:
                m = self.map[n]
                # Already converted (or deliberately skipped) and present
                # in the destination: do not descend further.
                if m == SKIPREV or self.dest.hascommitfrommap(m):
                    continue
            known.add(n)
            progress.update(len(known))
            commit = self.cachecommit(n)
            parents[n] = []
            for p in commit.parents:
                parents[n].append(p)
                # Breadth-first walk towards the roots.
                visit.append(p)
        progress.complete()

        return parents
275
305
    def mergesplicemap(self, parents, splicemap):
        """A splicemap redefines child/parent relationships. Check the
        map contains valid revision identifiers and merge the new
        links in the source graph.

        *parents* (as produced by walktree) is updated in place.
        """
        for c in sorted(splicemap):
            if c not in parents:
                if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
                    # Could be in source but not converted during this run
                    self.ui.warn(
                        _(
                            b'splice map revision %s is not being '
                            b'converted, ignoring\n'
                        )
                        % c
                    )
                continue
            pc = []
            for p in splicemap[c]:
                # We do not have to wait for nodes already in dest.
                if self.dest.hascommitforsplicemap(self.map.get(p, p)):
                    continue
                # Parent is not in dest and not being converted, not good
                if p not in parents:
                    raise error.Abort(_(b'unknown splice map parent: %s') % p)
                pc.append(p)
            # Replace the natural parents of c with the spliced ones still
            # pending conversion.
            parents[c] = pc
303
333
    def toposort(self, parents, sortmode):
        '''Return an ordering such that every uncommitted changeset is
        preceded by all its uncommitted ancestors.'''

        def mapchildren(parents):
            """Return a (children, roots) tuple where 'children' maps parent
            revision identifiers to children ones, and 'roots' is the list of
            revisions without parents. 'parents' must be a mapping of revision
            identifier to its parents ones.
            """
            visit = collections.deque(sorted(parents))
            seen = set()
            children = {}
            roots = []

            while visit:
                n = visit.popleft()
                if n in seen:
                    continue
                seen.add(n)
                # Ensure that nodes without parents are present in the
                # 'children' mapping.
                children.setdefault(n, [])
                hasparent = False
                for p in parents[n]:
                    if p not in self.map:
                        # Parent not converted yet: n must wait for it.
                        visit.append(p)
                        hasparent = True
                    children.setdefault(p, []).append(n)
                if not hasparent:
                    roots.append(n)

            return children, roots

        # Sort functions are supposed to take a list of revisions which
        # can be converted immediately and pick one

        def makebranchsorter():
            """If the previously converted revision has a child in the
            eligible revisions list, pick it. Return the list head
            otherwise. Branch sort attempts to minimize branch
            switching, which is harmful for Mercurial backend
            compression.
            """
            prev = [None]  # one-element list so picknext can rebind it

            def picknext(nodes):
                next = nodes[0]
                for n in nodes:
                    if prev[0] in parents[n]:
                        next = n
                        break
                prev[0] = next
                return next

            return picknext

        def makesourcesorter():
            """Source specific sort."""
            keyfn = lambda n: self.commitcache[n].sortkey

            def picknext(nodes):
                return sorted(nodes, key=keyfn)[0]

            return picknext

        def makeclosesorter():
            """Close order sort."""
            # Revisions carrying a 'close' extra sort first.
            keyfn = lambda n: (
                b'close' not in self.commitcache[n].extra,
                self.commitcache[n].sortkey,
            )

            def picknext(nodes):
                return sorted(nodes, key=keyfn)[0]

            return picknext

        def makedatesorter():
            """Sort revisions by date."""
            dates = {}  # memoized parsed dates

            def getdate(n):
                if n not in dates:
                    dates[n] = dateutil.parsedate(self.commitcache[n].date)
                return dates[n]

            def picknext(nodes):
                return min([(getdate(n), n) for n in nodes])[1]

            return picknext

        if sortmode == b'branchsort':
            picknext = makebranchsorter()
        elif sortmode == b'datesort':
            picknext = makedatesorter()
        elif sortmode == b'sourcesort':
            picknext = makesourcesorter()
        elif sortmode == b'closesort':
            picknext = makeclosesorter()
        else:
            raise error.Abort(_(b'unknown sort mode: %s') % sortmode)

        children, actives = mapchildren(parents)

        s = []
        pendings = {}  # rev -> parents still awaiting conversion
        while actives:
            n = picknext(actives)
            actives.remove(n)
            s.append(n)

            # Update dependents list
            for c in children.get(n, []):
                if c not in pendings:
                    pendings[c] = [p for p in parents[c] if p not in self.map]
                try:
                    pendings[c].remove(n)
                except ValueError:
                    raise error.Abort(
                        _(b'cycle detected between %s and %s')
                        % (recode(c), recode(n))
                    )
                if not pendings[c]:
                    # Parents are converted, node is eligible
                    actives.insert(0, c)
                    pendings[c] = None

        if len(s) != len(parents):
            raise error.Abort(_(b"not all revisions were sorted"))

        return s
436
466
437 def writeauthormap(self):
467 def writeauthormap(self):
438 authorfile = self.authorfile
468 authorfile = self.authorfile
439 if authorfile:
469 if authorfile:
440 self.ui.status(_(b'writing author map file %s\n') % authorfile)
470 self.ui.status(_(b'writing author map file %s\n') % authorfile)
441 ofile = open(authorfile, b'wb+')
471 ofile = open(authorfile, b'wb+')
442 for author in self.authors:
472 for author in self.authors:
443 ofile.write(
473 ofile.write(
444 util.tonativeeol(
474 util.tonativeeol(
445 b"%s=%s\n" % (author, self.authors[author])
475 b"%s=%s\n" % (author, self.authors[author])
446 )
476 )
447 )
477 )
448 ofile.close()
478 ofile.close()
449
479
    def readauthormap(self, authorfile):
        """Merge the mappings from *authorfile* into self.authors.

        Delegates to the module-level readauthormap() so the parsing
        logic is reusable outside this class.
        """
        self.authors = readauthormap(self.ui, authorfile, self.authors)
482
    def cachecommit(self, rev):
        """Fetch *rev* from the source, apply the author and branch maps,
        cache the result in self.commitcache, and return it."""
        commit = self.source.getcommit(rev)
        # Unmapped authors pass through unchanged.
        commit.author = self.authors.get(commit.author, commit.author)
        commit.branch = mapbranch(commit.branch, self.branchmap)
        self.commitcache[rev] = commit
        return commit
484
489
    def copy(self, rev):
        """Convert source revision *rev* into the destination and record
        the resulting mapping in self.map."""
        commit = self.commitcache[rev]
        full = self.opts.get(b'full')
        changes = self.source.getchanges(rev, full)
        if isinstance(changes, bytes):
            # The source collapsed this revision onto another one (or
            # asked to skip it entirely).
            if changes == SKIPREV:
                dest = SKIPREV
            else:
                dest = self.map[changes]
            self.map[rev] = dest
            return
        files, copies, cleanp2 = changes
        pbranches = []
        if commit.parents:
            for prev in commit.parents:
                if prev not in self.commitcache:
                    self.cachecommit(prev)
                pbranches.append(
                    (self.map[prev], self.commitcache[prev].branch)
                )
        self.dest.setbranch(commit.branch, pbranches)
        try:
            # A splicemap entry overrides the natural parents.
            parents = self.splicemap[rev]
            self.ui.status(
                _(b'spliced in %s as parents of %s\n')
                % (_(b' and ').join(parents), rev)
            )
            parents = [self.map.get(p, p) for p in parents]
        except KeyError:
            parents = [b[0] for b in pbranches]
            parents.extend(
                self.map[x] for x in commit.optparents if x in self.map
            )
        if len(pbranches) != 2:
            # cleanp2 only makes sense for a two-parent merge.
            cleanp2 = set()
        if len(parents) < 3:
            source = progresssource(self.ui, self.source, len(files))
        else:
            # For an octopus merge, we end up traversing the list of
            # changed files N-1 times. This tweak to the number of
            # files makes it so the progress bar doesn't overflow
            # itself.
            source = progresssource(
                self.ui, self.source, len(files) * (len(parents) - 1)
            )
        newnode = self.dest.putcommit(
            files, copies, parents, commit, source, self.map, full, cleanp2
        )
        source.close()
        self.source.converted(rev, newnode)
        self.map[rev] = newnode
536
541
def convert(self, sortmode):
    """Run the whole conversion pipeline from source to sink.

    Steps: scan the source for heads, walk the revision graph,
    apply the splicemap, topologically sort revisions according to
    *sortmode*, copy each revision into the destination sink, then
    migrate tags and bookmarks and persist the author map.  The
    revision map is updated incrementally so an interrupted run can
    resume.  Cleanup (closing source, sink and revmap) always runs,
    even on failure.
    """
    try:
        self.source.before()
        self.dest.before()
        self.source.setrevmap(self.map)
        self.ui.status(_(b"scanning source...\n"))
        heads = self.source.getheads()
        parents = self.walktree(heads)
        self.mergesplicemap(parents, self.splicemap)
        self.ui.status(_(b"sorting...\n"))
        t = self.toposort(parents, sortmode)
        num = len(t)
        c = None

        self.ui.status(_(b"converting...\n"))
        progress = self.ui.makeprogress(
            _(b'converting'), unit=_(b'revisions'), total=len(t)
        )
        for i, c in enumerate(t):
            num -= 1
            desc = self.commitcache[c].desc
            if b"\n" in desc:
                # show only the first line of the commit message
                desc = desc.splitlines()[0]
            # convert log message to local encoding without using
            # tolocal() because the encoding.encoding convert()
            # uses is 'utf-8'
            self.ui.status(b"%d %s\n" % (num, recode(desc)))
            self.ui.note(_(b"source: %s\n") % recode(c))
            progress.update(i)
            self.copy(c)
        progress.complete()

        if not self.ui.configbool(b'convert', b'skiptags'):
            tags = self.source.gettags()
            ctags = {}
            for k in tags:
                v = tags[k]
                # only keep tags whose target revision was actually
                # converted (not skipped)
                if self.map.get(v, SKIPREV) != SKIPREV:
                    ctags[k] = self.map[v]

            if c and ctags:
                nrev, tagsparent = self.dest.puttags(ctags)
                if nrev and tagsparent:
                    # write another hash correspondence to override the
                    # previous one so we don't end up with extra tag heads
                    tagsparents = [
                        e
                        for e in pycompat.iteritems(self.map)
                        if e[1] == tagsparent
                    ]
                    if tagsparents:
                        self.map[tagsparents[0][0]] = nrev

        bookmarks = self.source.getbookmarks()
        cbookmarks = {}
        for k in bookmarks:
            v = bookmarks[k]
            # same filtering as for tags: drop bookmarks on skipped revs
            if self.map.get(v, SKIPREV) != SKIPREV:
                cbookmarks[k] = self.map[v]

        if c and cbookmarks:
            self.dest.putbookmarks(cbookmarks)

        self.writeauthormap()
    finally:
        self.cleanup()
603
608
def cleanup(self):
    """Release conversion resources.

    Closes the destination sink first, then — even if that raises —
    the source, and finally the revision map file so progress is
    flushed to disk.
    """
    try:
        self.dest.after()
    finally:
        self.source.after()
    self.map.close()
610
615
611
616
def convert(ui, src, dest=None, revmapfile=None, **opts):
    """Entry point for the ``hg convert`` command.

    Builds a conversion source for *src* and a sink for *dest*
    (defaulting to ``<src>-hg``), resolves the requested sort mode,
    optionally wraps the source in a filemap, and drives the
    conversion via the converter class.

    Raises error.Abort on conflicting or unsupported sort modes; a
    partially created destination is removed if opening the source
    fails.
    """
    opts = pycompat.byteskwargs(opts)
    # conversion forces UTF-8 internally; remember the user's encoding
    # so recode() can translate messages back for display
    global orig_encoding
    orig_encoding = encoding.encoding
    encoding.encoding = b'UTF-8'

    # support --authors as an alias for --authormap
    if not opts.get(b'authormap'):
        opts[b'authormap'] = opts.get(b'authors')

    if not dest:
        dest = hg.defaultdest(src) + b"-hg"
        ui.status(_(b"assuming destination %s\n") % dest)

    destc = convertsink(ui, dest, opts.get(b'dest_type'))
    destc = scmutil.wrapconvertsink(destc)

    try:
        srcc, defaultsort = convertsource(
            ui, src, opts.get(b'source_type'), opts.get(b'rev')
        )
    except Exception:
        # don't leave a half-created destination behind
        for path in destc.created:
            shutil.rmtree(path, True)
        raise

    sortmodes = (b'branchsort', b'datesort', b'sourcesort', b'closesort')
    sortmode = [m for m in sortmodes if opts.get(m)]
    if len(sortmode) > 1:
        raise error.Abort(_(b'more than one sort mode specified'))
    if sortmode:
        sortmode = sortmode[0]
    else:
        sortmode = defaultsort

    if sortmode == b'sourcesort' and not srcc.hasnativeorder():
        raise error.Abort(
            _(b'--sourcesort is not supported by this data source')
        )
    if sortmode == b'closesort' and not srcc.hasnativeclose():
        raise error.Abort(
            _(b'--closesort is not supported by this data source')
        )

    fmap = opts.get(b'filemap')
    if fmap:
        srcc = filemap.filemap_source(ui, srcc, fmap)
        destc.setfilemapmode(True)

    if not revmapfile:
        revmapfile = destc.revmapfile()

    c = converter(ui, srcc, destc, revmapfile, opts)
    c.convert(sortmode)
General Comments 0
You need to be logged in to leave comments. Login now