Show More
@@ -1,686 +1,692 b'' | |||||
1 | # convcmd - convert extension commands definition |
|
1 | # convcmd - convert extension commands definition | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> |
|
3 | # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import collections |
|
8 | import collections | |
9 | import heapq |
|
9 | import heapq | |
10 | import os |
|
10 | import os | |
11 | import shutil |
|
11 | import shutil | |
12 |
|
12 | |||
13 | from mercurial.i18n import _ |
|
13 | from mercurial.i18n import _ | |
14 | from mercurial.pycompat import open |
|
14 | from mercurial.pycompat import open | |
15 | from mercurial import ( |
|
15 | from mercurial import ( | |
16 | encoding, |
|
16 | encoding, | |
17 | error, |
|
17 | error, | |
18 | hg, |
|
18 | hg, | |
19 | pycompat, |
|
19 | pycompat, | |
20 | scmutil, |
|
20 | scmutil, | |
21 | util, |
|
21 | util, | |
22 | ) |
|
22 | ) | |
23 | from mercurial.utils import dateutil |
|
23 | from mercurial.utils import dateutil | |
24 |
|
24 | |||
25 | from . import ( |
|
25 | from . import ( | |
26 | bzr, |
|
26 | bzr, | |
27 | common, |
|
27 | common, | |
28 | cvs, |
|
28 | cvs, | |
29 | darcs, |
|
29 | darcs, | |
30 | filemap, |
|
30 | filemap, | |
31 | git, |
|
31 | git, | |
32 | gnuarch, |
|
32 | gnuarch, | |
33 | hg as hgconvert, |
|
33 | hg as hgconvert, | |
34 | monotone, |
|
34 | monotone, | |
35 | p4, |
|
35 | p4, | |
36 | subversion, |
|
36 | subversion, | |
37 | ) |
|
37 | ) | |
38 |
|
38 | |||
# Re-export frequently used helpers from `common` so the rest of this module
# (and sibling modules) can refer to them without the `common.` prefix.
mapfile = common.mapfile
MissingTool = common.MissingTool
NoRepo = common.NoRepo
SKIPREV = common.SKIPREV

# Aliases for the source/sink classes of each supported backend; these are
# what the converter tables below reference.
bzr_source = bzr.bzr_source
convert_cvs = cvs.convert_cvs
convert_git = git.convert_git
darcs_source = darcs.darcs_source
gnuarch_source = gnuarch.gnuarch_source
mercurial_sink = hgconvert.mercurial_sink
mercurial_source = hgconvert.mercurial_source
monotone_source = monotone.monotone_source
p4_source = p4.p4_source
svn_sink = subversion.svn_sink
svn_source = subversion.svn_source

# Local encoding in effect before conversion forces UTF-8; consumed by
# recode() below.  Presumably reassigned by the conversion entry point
# elsewhere in this module — not visible in this chunk, TODO confirm.
orig_encoding = b'ascii'
57 |
|
57 | |||
58 |
|
58 | |||
def readauthormap(ui, authorfile, authors=None):
    """Parse an author map file and fold it into *authors*.

    Each significant line of ``authorfile`` has the form
    ``source=destination`` (bytes).  Blank lines and lines starting with
    ``#`` are skipped; malformed lines are warned about and ignored.
    When a source author is already mapped to a *different* destination,
    a status message is emitted and the existing entry is kept.

    Returns the (possibly newly created) ``authors`` dict.
    """
    if authors is None:
        authors = {}
    with open(authorfile, b'rb') as afile:
        for line in afile:

            line = line.strip()
            if not line or line.startswith(b'#'):
                continue

            try:
                srcauthor, dstauthor = line.split(b'=', 1)
            except ValueError:
                msg = _(b'ignoring bad line in author map file %s: %s\n')
                ui.warn(msg % (authorfile, line.rstrip()))
                continue

            srcauthor = srcauthor.strip()
            dstauthor = dstauthor.strip()
            existing = authors.get(srcauthor)
            if existing is None or existing == dstauthor:
                # New mapping, or a harmless repeat of the same one.
                msg = _(b'mapping author %s to %s\n')
                ui.debug(msg % (srcauthor, dstauthor))
                authors[srcauthor] = dstauthor
            else:
                # Conflicting mapping: report it; the prior entry wins.
                m = _(b'overriding mapping for author %s, was %s, will be %s\n')
                ui.status(m % (srcauthor, existing, dstauthor))
    return authors
87 |
|
87 | |||
88 |
|
88 | |||
def recode(s):
    """Best-effort transcode *s* into the original local encoding.

    ``str`` input is encoded directly; anything else is treated as UTF-8
    bytes first.  Characters that cannot be represented are replaced
    rather than raising.
    """
    target = pycompat.sysstr(orig_encoding)
    if not isinstance(s, str):
        s = s.decode('utf-8')
    return s.encode(target, 'replace')
96 |
|
96 | |||
97 |
|
97 | |||
def mapbranch(branch, branchmap):
    """Translate a source branch name through the user's branch map.

    >>> bmap = {b'default': b'branch1'}
    >>> for i in [b'', None]:
    ...     mapbranch(i, bmap)
    'branch1'
    'branch1'
    >>> bmap = {b'None': b'branch2'}
    >>> for i in [b'', None]:
    ...     mapbranch(i, bmap)
    'branch2'
    'branch2'
    >>> bmap = {b'None': b'branch3', b'default': b'branch4'}
    >>> for i in [b'None', b'', None, b'default', b'branch5']:
    ...     mapbranch(i, bmap)
    'branch3'
    'branch4'
    'branch4'
    'branch4'
    'branch5'
    """
    # A None/empty branch denotes the source repository's default branch,
    # headed for the destination's default branch.  Looking up the literal
    # "default" lets the user remap it.
    lookup = branch if branch else b'default'
    branch = branchmap.get(lookup, branch)
    # Historically the literal "None" denoted the default branch; keep
    # honouring it for backward compatibility.
    if branch:
        return branch
    return branchmap.get(b'None', branch)
130 |
|
130 | |||
131 |
|
131 | |||
# (name, source class, default sort mode) for every supported conversion
# source.  convertsource() scans this table in declaration order.
source_converters = [
    (b'cvs', convert_cvs, b'branchsort'),
    (b'git', convert_git, b'branchsort'),
    (b'svn', svn_source, b'branchsort'),
    (b'hg', mercurial_source, b'sourcesort'),
    (b'darcs', darcs_source, b'branchsort'),
    (b'mtn', monotone_source, b'branchsort'),
    (b'gnuarch', gnuarch_source, b'branchsort'),
    (b'bzr', bzr_source, b'branchsort'),
    (b'p4', p4_source, b'branchsort'),
]

# (name, sink class) for every supported conversion destination.
# convertsink() scans this table in declaration order.
sink_converters = [
    (b'hg', mercurial_sink),
    (b'svn', svn_sink),
]
148 |
|
148 | |||
149 |
|
149 | |||
def convertsource(ui, path, type, revs):
    """Instantiate a conversion source for the repository at *path*.

    Returns a ``(source, sortmode)`` pair.  When *type* is set, only that
    backend is tried; otherwise each registered backend is probed in
    turn.  Aborts for an unknown *type*, or when no backend accepts the
    repository (reporting each backend's failure unless quiet).
    """
    if type and type not in [s[0] for s in source_converters]:
        raise error.Abort(_(b'%s: invalid source repository type') % type)
    exceptions = []
    for name, source, sortmode in source_converters:
        if type and name != type:
            continue
        try:
            return source(ui, name, path, revs), sortmode
        except (NoRepo, MissingTool) as inst:
            # Remember why this backend declined; reported below.
            exceptions.append(inst)
    if not ui.quiet:
        for inst in exceptions:
            ui.write(b"%s\n" % pycompat.bytestr(inst.args[0]))
    raise error.Abort(_(b'%s: missing or unsupported repository') % path)
164 |
|
164 | |||
165 |
|
165 | |||
def convertsink(ui, path, type):
    """Instantiate a conversion sink for the repository at *path*.

    When *type* is set, only that backend is tried; otherwise each
    registered sink is probed in turn.  Aborts for an unknown *type*,
    for a missing tool, or when no backend recognizes the repository.
    """
    if type and type not in [s[0] for s in sink_converters]:
        raise error.Abort(_(b'%s: invalid destination repository type') % type)
    for name, sink in sink_converters:
        if type and name != type:
            continue
        try:
            return sink(ui, name, path)
        except NoRepo as inst:
            # Not this backend's repository format; keep probing.
            ui.note(_(b"convert: %s\n") % inst)
        except MissingTool as inst:
            raise error.Abort(b'%s\n' % inst)
    raise error.Abort(_(b'%s: unknown repository type') % path)
178 |
|
178 | |||
179 |
|
179 | |||
class progresssource:
    """Wrap a conversion source and report per-file retrieval progress.

    Only the methods needed while replaying commits are forwarded;
    ``getfile`` additionally advances a ui progress bar sized to the
    expected number of files.
    """

    def __init__(self, ui, source, filecount):
        self.ui = ui
        self.source = source
        bar = ui.makeprogress(
            _(b'getting files'), unit=_(b'files'), total=filecount
        )
        self.progress = bar

    def getfile(self, file, rev):
        """Advance the progress bar, then fetch *file* at *rev*."""
        self.progress.increment(item=file)
        return self.source.getfile(file, rev)

    def targetfilebelongstosource(self, targetfilename):
        """Delegate target-file ownership checks to the wrapped source."""
        return self.source.targetfilebelongstosource(targetfilename)

    def lookuprev(self, rev):
        """Delegate revision lookup to the wrapped source."""
        return self.source.lookuprev(rev)

    def close(self):
        """Finish the progress bar."""
        self.progress.complete()
200 |
|
200 | |||
201 |
|
201 | |||
202 | # Sorters are used by the `toposort` function to maintain a set of revisions |
|
202 | # Sorters are used by the `toposort` function to maintain a set of revisions | |
203 | # which can be converted immediately and pick one |
|
203 | # which can be converted immediately and pick one | |
class branchsorter:
    """Prefer children of the most recently converted revision.

    If the previously converted revision has a child among the eligible
    nodes, that child is picked next; otherwise the list head is used.
    Minimizing branch switches helps the Mercurial backend's delta
    compression.
    """

    def __init__(self, parents):
        self.nodes = []
        self.parents = parents
        self.prev = None

    def picknext(self):
        """Remove and return the next node to convert."""
        chosen = self.nodes[0]
        for candidate in self.nodes:
            # Stay on the same branch when possible.
            if self.prev in self.parents[candidate]:
                chosen = candidate
                break
        self.prev = chosen
        self.nodes.remove(chosen)
        return chosen

    def insert(self, node):
        # Newly eligible nodes go to the front of the list.
        self.nodes.insert(0, node)

    def __len__(self):
        return len(self.nodes)
232 |
|
232 | |||
233 |
|
233 | |||
class keysorter:
    """Min-heap ordering by a key function, ties broken by insertion order."""

    def __init__(self, keyfn):
        self.heap = []
        self.keyfn = keyfn
        self.counter = 0

    def picknext(self):
        """Remove and return the node with the smallest key."""
        _key, _seq, node = heapq.heappop(self.heap)
        return node

    def insert(self, node):
        # The monotonically increasing sequence number makes equal keys
        # FIFO and prevents the heap from ever comparing nodes directly.
        entry = (self.keyfn(node), self.counter, node)
        self.counter += 1
        heapq.heappush(self.heap, entry)

    def __len__(self):
        return len(self.heap)
253 |
|
253 | |||
254 |
|
254 | |||
255 | class converter: |
|
255 | class converter: | |
    def __init__(self, ui, source, dest, revmapfile, opts):
        """Set up a conversion from `source` to `dest`.

        ``revmapfile`` is the path of the persistent source-rev to
        dest-rev map; ``opts`` is a bytes-keyed option dict
        (``authormap``, ``splicemap``, ``branchmap``, ...).
        """

        self.source = source
        self.dest = dest
        self.ui = ui
        self.opts = opts
        # Cache of source commit objects, keyed by source revision.
        self.commitcache = {}
        # Source author -> destination author mapping.
        self.authors = {}
        self.authorfile = None

        # Record converted revisions persistently: maps source revision
        # ID to target revision ID (both strings). (This is how
        # incremental conversions work.)
        self.map = mapfile(ui, revmapfile)

        # Read first the dst author map if any
        authorfile = self.dest.authorfile()
        if authorfile and os.path.exists(authorfile):
            self.readauthormap(authorfile)
        # Extend/Override with new author map if necessary
        if opts.get(b'authormap'):
            self.readauthormap(opts.get(b'authormap'))
        self.authorfile = self.dest.authorfile()

        self.splicemap = self.parsesplicemap(opts.get(b'splicemap'))
        self.branchmap = mapfile(ui, opts.get(b'branchmap'))
282 |
|
282 | |||
283 | def parsesplicemap(self, path): |
|
283 | def parsesplicemap(self, path): | |
284 | """check and validate the splicemap format and |
|
284 | """check and validate the splicemap format and | |
285 | return a child/parents dictionary. |
|
285 | return a child/parents dictionary. | |
286 | Format checking has two parts. |
|
286 | Format checking has two parts. | |
287 | 1. generic format which is same across all source types |
|
287 | 1. generic format which is same across all source types | |
288 | 2. specific format checking which may be different for |
|
288 | 2. specific format checking which may be different for | |
289 | different source type. This logic is implemented in |
|
289 | different source type. This logic is implemented in | |
290 | checkrevformat function in source files like |
|
290 | checkrevformat function in source files like | |
291 | hg.py, subversion.py etc. |
|
291 | hg.py, subversion.py etc. | |
292 | """ |
|
292 | """ | |
293 |
|
293 | |||
294 | if not path: |
|
294 | if not path: | |
295 | return {} |
|
295 | return {} | |
296 | m = {} |
|
296 | m = {} | |
297 | try: |
|
297 | try: | |
298 | fp = open(path, b'rb') |
|
298 | fp = open(path, b'rb') | |
299 | for i, line in enumerate(fp): |
|
299 | for i, line in enumerate(fp): | |
300 | line = line.splitlines()[0].rstrip() |
|
300 | line = line.splitlines()[0].rstrip() | |
301 | if not line: |
|
301 | if not line: | |
302 | # Ignore blank lines |
|
302 | # Ignore blank lines | |
303 | continue |
|
303 | continue | |
304 | # split line |
|
304 | # split line | |
305 | lex = common.shlexer(data=line, whitespace=b',') |
|
305 | lex = common.shlexer(data=line, whitespace=b',') | |
306 | line = list(lex) |
|
306 | line = list(lex) | |
307 | # check number of parents |
|
307 | # check number of parents | |
308 | if not (2 <= len(line) <= 3): |
|
308 | if not (2 <= len(line) <= 3): | |
309 | raise error.Abort( |
|
309 | raise error.Abort( | |
310 | _( |
|
310 | _( | |
311 | b'syntax error in %s(%d): child parent1' |
|
311 | b'syntax error in %s(%d): child parent1' | |
312 | b'[,parent2] expected' |
|
312 | b'[,parent2] expected' | |
313 | ) |
|
313 | ) | |
314 | % (path, i + 1) |
|
314 | % (path, i + 1) | |
315 | ) |
|
315 | ) | |
316 | for part in line: |
|
316 | for part in line: | |
317 | self.source.checkrevformat(part) |
|
317 | self.source.checkrevformat(part) | |
318 | child, p1, p2 = line[0], line[1:2], line[2:] |
|
318 | child, p1, p2 = line[0], line[1:2], line[2:] | |
319 | if p1 == p2: |
|
319 | if p1 == p2: | |
320 | m[child] = p1 |
|
320 | m[child] = p1 | |
321 | else: |
|
321 | else: | |
322 | m[child] = p1 + p2 |
|
322 | m[child] = p1 + p2 | |
323 | # if file does not exist or error reading, exit |
|
323 | # if file does not exist or error reading, exit | |
324 | except IOError: |
|
324 | except IOError: | |
325 | raise error.Abort( |
|
325 | raise error.Abort( | |
326 | _(b'splicemap file not found or error reading %s:') % path |
|
326 | _(b'splicemap file not found or error reading %s:') % path | |
327 | ) |
|
327 | ) | |
328 | return m |
|
328 | return m | |
329 |
|
329 | |||
330 | def walktree(self, heads): |
|
330 | def walktree(self, heads): | |
331 | """Return a mapping that identifies the uncommitted parents of every |
|
331 | """Return a mapping that identifies the uncommitted parents of every | |
332 | uncommitted changeset.""" |
|
332 | uncommitted changeset.""" | |
333 | visit = list(heads) |
|
333 | visit = list(heads) | |
334 | known = set() |
|
334 | known = set() | |
335 | parents = {} |
|
335 | parents = {} | |
336 | numcommits = self.source.numcommits() |
|
336 | numcommits = self.source.numcommits() | |
337 | progress = self.ui.makeprogress( |
|
337 | progress = self.ui.makeprogress( | |
338 | _(b'scanning'), unit=_(b'revisions'), total=numcommits |
|
338 | _(b'scanning'), unit=_(b'revisions'), total=numcommits | |
339 | ) |
|
339 | ) | |
340 | while visit: |
|
340 | while visit: | |
341 | n = visit.pop(0) |
|
341 | n = visit.pop(0) | |
342 | if n in known: |
|
342 | if n in known: | |
343 | continue |
|
343 | continue | |
344 | if n in self.map: |
|
344 | if n in self.map: | |
345 | m = self.map[n] |
|
345 | m = self.map[n] | |
346 | if m == SKIPREV or self.dest.hascommitfrommap(m): |
|
346 | if m == SKIPREV or self.dest.hascommitfrommap(m): | |
347 | continue |
|
347 | continue | |
348 | known.add(n) |
|
348 | known.add(n) | |
349 | progress.update(len(known)) |
|
349 | progress.update(len(known)) | |
350 | commit = self.cachecommit(n) |
|
350 | commit = self.cachecommit(n) | |
351 | parents[n] = [] |
|
351 | parents[n] = [] | |
352 | for p in commit.parents: |
|
352 | for p in commit.parents: | |
353 | parents[n].append(p) |
|
353 | parents[n].append(p) | |
354 | visit.append(p) |
|
354 | visit.append(p) | |
355 | progress.complete() |
|
355 | progress.complete() | |
356 |
|
356 | |||
357 | return parents |
|
357 | return parents | |
358 |
|
358 | |||
359 | def mergesplicemap(self, parents, splicemap): |
|
359 | def mergesplicemap(self, parents, splicemap): | |
360 | """A splicemap redefines child/parent relationships. Check the |
|
360 | """A splicemap redefines child/parent relationships. Check the | |
361 | map contains valid revision identifiers and merge the new |
|
361 | map contains valid revision identifiers and merge the new | |
362 | links in the source graph. |
|
362 | links in the source graph. | |
363 | """ |
|
363 | """ | |
364 | for c in sorted(splicemap): |
|
364 | for c in sorted(splicemap): | |
365 | if c not in parents: |
|
365 | if c not in parents: | |
366 | if not self.dest.hascommitforsplicemap(self.map.get(c, c)): |
|
366 | if not self.dest.hascommitforsplicemap(self.map.get(c, c)): | |
367 | # Could be in source but not converted during this run |
|
367 | # Could be in source but not converted during this run | |
368 | self.ui.warn( |
|
368 | self.ui.warn( | |
369 | _( |
|
369 | _( | |
370 | b'splice map revision %s is not being ' |
|
370 | b'splice map revision %s is not being ' | |
371 | b'converted, ignoring\n' |
|
371 | b'converted, ignoring\n' | |
372 | ) |
|
372 | ) | |
373 | % c |
|
373 | % c | |
374 | ) |
|
374 | ) | |
375 | continue |
|
375 | continue | |
376 | pc = [] |
|
376 | pc = [] | |
377 | for p in splicemap[c]: |
|
377 | for p in splicemap[c]: | |
378 | # We do not have to wait for nodes already in dest. |
|
378 | # We do not have to wait for nodes already in dest. | |
379 | if self.dest.hascommitforsplicemap(self.map.get(p, p)): |
|
379 | if self.dest.hascommitforsplicemap(self.map.get(p, p)): | |
380 | continue |
|
380 | continue | |
381 | # Parent is not in dest and not being converted, not good |
|
381 | # Parent is not in dest and not being converted, not good | |
382 | if p not in parents: |
|
382 | if p not in parents: | |
383 | raise error.Abort(_(b'unknown splice map parent: %s') % p) |
|
383 | raise error.Abort(_(b'unknown splice map parent: %s') % p) | |
384 | pc.append(p) |
|
384 | pc.append(p) | |
385 | parents[c] = pc |
|
385 | parents[c] = pc | |
386 |
|
386 | |||
    def toposort(self, parents, sortmode):
        """Return an ordering such that every uncommitted changeset is
        preceded by all its uncommitted ancestors.

        ``parents`` maps each uncommitted revision to its uncommitted
        parents (as built by walktree/mergesplicemap).  ``sortmode`` is
        one of b'branchsort', b'datesort', b'sourcesort' or b'closesort';
        any other value raises error.Abort.
        """

        def mapchildren(parents):
            """Return a (children, roots) tuple where 'children' maps parent
            revision identifiers to children ones, and 'roots' is the list of
            revisions without parents. 'parents' must be a mapping of revision
            identifier to its parents ones.
            """
            visit = collections.deque(sorted(parents))
            seen = set()
            children = {}
            roots = []

            while visit:
                n = visit.popleft()
                if n in seen:
                    continue
                seen.add(n)
                # Ensure that nodes without parents are present in the
                # 'children' mapping.
                children.setdefault(n, [])
                hasparent = False
                for p in parents[n]:
                    # Only parents that are themselves unconverted count
                    # as ordering constraints.
                    if p not in self.map:
                        visit.append(p)
                        hasparent = True
                    children.setdefault(p, []).append(n)
                if not hasparent:
                    roots.append(n)

            return children, roots

        def makesourcesorter():
            """Source specific sort."""
            keyfn = lambda n: self.commitcache[n].sortkey
            return keysorter(keyfn)

        def makeclosesorter():
            """Close order sort: revisions carrying a 'close' extra come
            first, then source order."""
            keyfn = lambda n: (
                b'close' not in self.commitcache[n].extra,
                self.commitcache[n].sortkey,
            )
            return keysorter(keyfn)

        def makedatesorter():
            """Sort revisions by date."""

            def getdate(n):
                commit = self.commitcache[n]
                # The other entries are here as tie breaker for stability
                return (
                    dateutil.parsedate(commit.date),
                    commit.rev,
                    commit.branch,
                )

            return keysorter(getdate)

        # Choose the strategy used to pick among currently-eligible nodes.
        if sortmode == b'branchsort':
            sorter = branchsorter(parents)
        elif sortmode == b'datesort':
            sorter = makedatesorter()
        elif sortmode == b'sourcesort':
            sorter = makesourcesorter()
        elif sortmode == b'closesort':
            sorter = makeclosesorter()
        else:
            raise error.Abort(_(b'unknown sort mode: %s') % sortmode)

        children, roots = mapchildren(parents)

        # Seed the sorter with the revisions that have no pending parents.
        for node in roots:
            sorter.insert(node)

        # Kahn-style topological sort: repeatedly pick an eligible node and
        # release any child whose pending-parent list becomes empty.
        s = []
        pendings = {}
        while sorter:
            n = sorter.picknext()
            s.append(n)

            # Update dependents list
            for c in children.get(n, []):
                if c not in pendings:
                    pendings[c] = [p for p in parents[c] if p not in self.map]
                try:
                    pendings[c].remove(n)
                except ValueError:
                    # n was not a pending parent of c: the only way that
                    # happens is a cycle through the splice map.
                    raise error.Abort(
                        _(b'cycle detected between %s and %s')
                        % (recode(c), recode(n))
                    )
                if not pendings[c]:
                    # Parents are converted, node is eligible
                    sorter.insert(c)
                    pendings[c] = None

        if len(s) != len(parents):
            raise error.Abort(_(b"not all revisions were sorted"))

        return s
484 |
|
490 | |||
485 | def writeauthormap(self): |
|
491 | def writeauthormap(self): | |
486 | authorfile = self.authorfile |
|
492 | authorfile = self.authorfile | |
487 | if authorfile: |
|
493 | if authorfile: | |
488 | self.ui.status(_(b'writing author map file %s\n') % authorfile) |
|
494 | self.ui.status(_(b'writing author map file %s\n') % authorfile) | |
489 | ofile = open(authorfile, b'wb+') |
|
495 | ofile = open(authorfile, b'wb+') | |
490 | for author in self.authors: |
|
496 | for author in self.authors: | |
491 | ofile.write( |
|
497 | ofile.write( | |
492 | util.tonativeeol( |
|
498 | util.tonativeeol( | |
493 | b"%s=%s\n" % (author, self.authors[author]) |
|
499 | b"%s=%s\n" % (author, self.authors[author]) | |
494 | ) |
|
500 | ) | |
495 | ) |
|
501 | ) | |
496 | ofile.close() |
|
502 | ofile.close() | |
497 |
|
503 | |||
498 | def readauthormap(self, authorfile): |
|
504 | def readauthormap(self, authorfile): | |
499 | self.authors = readauthormap(self.ui, authorfile, self.authors) |
|
505 | self.authors = readauthormap(self.ui, authorfile, self.authors) | |
500 |
|
506 | |||
501 | def cachecommit(self, rev): |
|
507 | def cachecommit(self, rev): | |
502 | commit = self.source.getcommit(rev) |
|
508 | commit = self.source.getcommit(rev) | |
503 | commit.author = self.authors.get(commit.author, commit.author) |
|
509 | commit.author = self.authors.get(commit.author, commit.author) | |
504 | commit.branch = mapbranch(commit.branch, self.branchmap) |
|
510 | commit.branch = mapbranch(commit.branch, self.branchmap) | |
505 | self.commitcache[rev] = commit |
|
511 | self.commitcache[rev] = commit | |
506 | return commit |
|
512 | return commit | |
507 |
|
513 | |||
    def copy(self, rev):
        """Convert a single source revision *rev* into the destination.

        Reads the cached commit and its change list from the source,
        resolves parents (honouring the splicemap), and records the new
        destination node in ``self.map``.
        """
        commit = self.commitcache[rev]
        full = self.opts.get(b'full')
        changes = self.source.getchanges(rev, full)
        if isinstance(changes, bytes):
            # A bytes result means "alias this revision" rather than a
            # real change list: either skip it outright or map it onto
            # an already-converted revision.
            if changes == SKIPREV:
                dest = SKIPREV
            else:
                dest = self.map[changes]
            self.map[rev] = dest
            return
        files, copies, cleanp2 = changes
        pbranches = []
        if commit.parents:
            for prev in commit.parents:
                # Ensure every parent is cached so its branch is known.
                if prev not in self.commitcache:
                    self.cachecommit(prev)
                pbranches.append(
                    (self.map[prev], self.commitcache[prev].branch)
                )
        self.dest.setbranch(commit.branch, pbranches)
        try:
            # Splicemap entries override the source-reported parents.
            parents = self.splicemap[rev]
            self.ui.status(
                _(b'spliced in %s as parents of %s\n')
                % (_(b' and ').join(parents), rev)
            )
            # Spliced parents may be unconverted; fall back to the raw id.
            parents = [self.map.get(p, p) for p in parents]
        except KeyError:
            # No splice entry: use the converted parents plus any
            # optional parents that have already been converted.
            parents = [b[0] for b in pbranches]
            parents.extend(
                self.map[x] for x in commit.optparents if x in self.map
            )
        if len(pbranches) != 2:
            # cleanp2 only makes sense for a two-parent merge.
            cleanp2 = set()
        if len(parents) < 3:
            source = progresssource(self.ui, self.source, len(files))
        else:
            # For an octopus merge, we end up traversing the list of
            # changed files N-1 times. This tweak to the number of
            # files makes it so the progress bar doesn't overflow
            # itself.
            source = progresssource(
                self.ui, self.source, len(files) * (len(parents) - 1)
            )
        newnode = self.dest.putcommit(
            files, copies, parents, commit, source, self.map, full, cleanp2
        )
        source.close()
        self.source.converted(rev, newnode)
        self.map[rev] = newnode
559 |
|
565 | |||
    def convert(self, sortmode):
        """Drive the whole conversion: scan, sort, copy each revision,
        then transfer tags, bookmarks and the author map.

        ``sortmode`` selects the revision ordering (see ``toposort``).
        Cleanup hooks always run, even on failure.
        """
        try:
            self.source.before()
            self.dest.before()
            self.source.setrevmap(self.map)
            self.ui.status(_(b"scanning source...\n"))
            heads = self.source.getheads()
            parents = self.walktree(heads)
            self.mergesplicemap(parents, self.splicemap)
            self.ui.status(_(b"sorting...\n"))
            t = self.toposort(parents, sortmode)
            # num counts down so the status line shows revisions remaining.
            num = len(t)
            # c keeps the last converted revision; used below to decide
            # whether tags/bookmarks can be attached.
            c = None

            self.ui.status(_(b"converting...\n"))
            progress = self.ui.makeprogress(
                _(b'converting'), unit=_(b'revisions'), total=len(t)
            )
            for i, c in enumerate(t):
                num -= 1
                desc = self.commitcache[c].desc
                if b"\n" in desc:
                    desc = desc.splitlines()[0]
                # convert log message to local encoding without using
                # tolocal() because the encoding.encoding convert()
                # uses is 'utf-8'
                self.ui.status(b"%d %s\n" % (num, recode(desc)))
                self.ui.note(_(b"source: %s\n") % recode(c))
                progress.update(i)
                self.copy(c)
            progress.complete()

            if not self.ui.configbool(b'convert', b'skiptags'):
                tags = self.source.gettags()
                ctags = {}
                for k in tags:
                    v = tags[k]
                    # Only keep tags whose target was actually converted.
                    if self.map.get(v, SKIPREV) != SKIPREV:
                        ctags[k] = self.map[v]

                if c and ctags:
                    nrev, tagsparent = self.dest.puttags(ctags)
                    if nrev and tagsparent:
                        # write another hash correspondence to override the
                        # previous one so we don't end up with extra tag heads
                        tagsparents = [
                            e for e in self.map.items() if e[1] == tagsparent
                        ]
                        if tagsparents:
                            self.map[tagsparents[0][0]] = nrev

                bookmarks = self.source.getbookmarks()
                cbookmarks = {}
                for k in bookmarks:
                    v = bookmarks[k]
                    # Same filtering as tags: drop bookmarks on skipped revs.
                    if self.map.get(v, SKIPREV) != SKIPREV:
                        cbookmarks[k] = self.map[v]

                if c and cbookmarks:
                    self.dest.putbookmarks(cbookmarks)

            self.writeauthormap()
        finally:
            self.cleanup()
624 |
|
630 | |||
    def cleanup(self):
        """Run sink and source teardown hooks, then close the revision map.

        The source's ``after()`` runs even if the sink's raises; the map
        is only closed once both hooks have completed.
        """
        try:
            self.dest.after()
        finally:
            self.source.after()
        self.map.close()
631 |
|
637 | |||
632 |
|
638 | |||
def convert(ui, src, dest=None, revmapfile=None, **opts):
    """Top-level entry point for the ``hg convert`` command.

    Builds a source and a sink from *src*/*dest*, picks a sort mode,
    optionally applies a file map, and runs the conversion. When *dest*
    is not given it defaults to ``<src>-hg``.
    """
    opts = pycompat.byteskwargs(opts)
    global orig_encoding
    orig_encoding = encoding.encoding
    encoding.encoding = b'UTF-8'

    # support --authors as an alias for --authormap
    if not opts.get(b'authormap'):
        opts[b'authormap'] = opts.get(b'authors')

    if not dest:
        dest = hg.defaultdest(src) + b"-hg"
        ui.status(_(b"assuming destination %s\n") % dest)

    sink = scmutil.wrapconvertsink(convertsink(ui, dest, opts.get(b'dest_type')))

    try:
        srcc, defaultsort = convertsource(
            ui, src, opts.get(b'source_type'), opts.get(b'rev')
        )
    except Exception:
        # Remove anything the sink created before propagating the failure.
        for path in sink.created:
            shutil.rmtree(path, True)
        raise

    # At most one explicit sort mode may be requested on the command line.
    requested = [
        mode
        for mode in (b'branchsort', b'datesort', b'sourcesort', b'closesort')
        if opts.get(mode)
    ]
    if len(requested) > 1:
        raise error.Abort(_(b'more than one sort mode specified'))
    sortmode = requested[0] if requested else defaultsort

    if sortmode == b'sourcesort' and not srcc.hasnativeorder():
        raise error.Abort(
            _(b'--sourcesort is not supported by this data source')
        )
    if sortmode == b'closesort' and not srcc.hasnativeclose():
        raise error.Abort(
            _(b'--closesort is not supported by this data source')
        )

    fmap = opts.get(b'filemap')
    if fmap:
        # Wrap the source so only mapped files are converted.
        srcc = filemap.filemap_source(ui, srcc, fmap)
        sink.setfilemapmode(True)

    if not revmapfile:
        revmapfile = sink.revmapfile()

    converter(ui, srcc, sink, revmapfile, opts).convert(sortmode)
General Comments 0
You need to be logged in to leave comments.
Login now