@@ -1,331 +1,342 @@
 # convcmd - convert extension commands definition
 #
 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
 #
 # This software may be used and distributed according to the terms
 # of the GNU General Public License, incorporated herein by reference.
 
 from common import NoRepo, SKIPREV, converter_source, converter_sink
 from cvs import convert_cvs
 from darcs import darcs_source
 from git import convert_git
 from hg import mercurial_source, mercurial_sink
 from subversion import svn_source, debugsvnlog
 import filemap
 
 import os, shutil
 from mercurial import hg, util
 from mercurial.i18n import _
 
 source_converters = [
     ('cvs', convert_cvs),
     ('git', convert_git),
     ('svn', svn_source),
     ('hg', mercurial_source),
     ('darcs', darcs_source),
     ]
 
 sink_converters = [
     ('hg', mercurial_sink),
     ]
 
 def convertsource(ui, path, type, rev):
     exceptions = []
     for name, source in source_converters:
         try:
             if not type or name == type:
                 return source(ui, path, rev)
         except NoRepo, inst:
             exceptions.append(inst)
     if not ui.quiet:
         for inst in exceptions:
             ui.write(_("%s\n") % inst)
     raise util.Abort('%s: unknown repository type' % path)
 
 def convertsink(ui, path, type):
     for name, sink in sink_converters:
         try:
             if not type or name == type:
                 return sink(ui, path)
         except NoRepo, inst:
             ui.note(_("convert: %s\n") % inst)
     raise util.Abort('%s: unknown repository type' % path)
 
 class converter(object):
     def __init__(self, ui, source, dest, revmapfile, opts):
 
         self.source = source
         self.dest = dest
         self.ui = ui
         self.opts = opts
         self.commitcache = {}
         self.revmapfile = revmapfile
         self.revmapfilefd = None
         self.authors = {}
         self.authorfile = None
 
         self.maporder = []
         self.map = {}
         try:
             origrevmapfile = open(self.revmapfile, 'r')
             for l in origrevmapfile:
                 sv, dv = l[:-1].split()
                 if sv not in self.map:
                     self.maporder.append(sv)
                 self.map[sv] = dv
             origrevmapfile.close()
         except IOError:
             pass
 
         # Read first the dst author map if any
         authorfile = self.dest.authorfile()
         if authorfile and os.path.exists(authorfile):
             self.readauthormap(authorfile)
         # Extend/Override with new author map if necessary
         if opts.get('authors'):
             self.readauthormap(opts.get('authors'))
             self.authorfile = self.dest.authorfile()
 
     def walktree(self, heads):
         '''Return a mapping that identifies the uncommitted parents of every
         uncommitted changeset.'''
         visit = heads
         known = {}
         parents = {}
         while visit:
             n = visit.pop(0)
             if n in known or n in self.map: continue
             known[n] = 1
             commit = self.cachecommit(n)
             parents[n] = []
             for p in commit.parents:
                 parents[n].append(p)
                 visit.append(p)
 
         return parents
 
     def toposort(self, parents):
         '''Return an ordering such that every uncommitted changeset is
         preceeded by all its uncommitted ancestors.'''
         visit = parents.keys()
         seen = {}
         children = {}
+        actives = []
 
         while visit:
             n = visit.pop(0)
             if n in seen: continue
             seen[n] = 1
             # Ensure that nodes without parents are present in the 'children'
             # mapping.
             children.setdefault(n, [])
+            hasparent = False
             for p in parents[n]:
                 if not p in self.map:
                     visit.append(p)
+                    hasparent = True
                 children.setdefault(p, []).append(n)
+            if not hasparent:
+                actives.append(n)
+
+        del seen
+        del visit
+
+        if self.opts.get('datesort'):
+            dates = {}
+            def getdate(n):
+                if n not in dates:
+                    dates[n] = util.parsedate(self.commitcache[n].date)
+                return dates[n]
+
+            def picknext(nodes):
+                return min([(getdate(n), n) for n in nodes])[1]
+        else:
+            prev = [None]
+            def picknext(nodes):
+                # Return the first eligible child of the previously converted
+                # revision, or any of them.
+                next = nodes[0]
+                for n in nodes:
+                    if prev[0] in parents[n]:
+                        next = n
+                        break
+                prev[0] = next
+                return next
 
         s = []
-        removed = {}
-        visit = children.keys()
-        while visit:
-            n = visit.pop(0)
-            if n in removed: continue
-            dep = 0
-            if n in parents:
-                for p in parents[n]:
-                    if p in self.map: continue
-                    if p not in removed:
-                        # we're still dependent
-                        visit.append(n)
-                        dep = 1
-                        break
+        pendings = {}
+        while actives:
+            n = picknext(actives)
+            actives.remove(n)
+            s.append(n)
 
-            if not dep:
-                # all n's parents are in the list
-                removed[n] = 1
-                if n not in self.map:
-                    s.append(n)
-                if n in children:
-                    for c in children[n]:
-                        visit.insert(0, c)
+            # Update dependents list
+            for c in children.get(n, []):
+                if c not in pendings:
+                    pendings[c] = [p for p in parents[c] if p not in self.map]
+                pendings[c].remove(n)
+                if not pendings[c]:
+                    # Parents are converted, node is eligible
+                    actives.insert(0, c)
+                    pendings[c] = None
 
-        if self.opts.get('datesort'):
-            depth = {}
-            for n in s:
-                depth[n] = 0
-                pl = [p for p in self.commitcache[n].parents
-                      if p not in self.map]
-                if pl:
-                    depth[n] = max([depth[p] for p in pl]) + 1
-
-            s = [(depth[n], util.parsedate(self.commitcache[n].date), n)
-                 for n in s]
-            s.sort()
-            s = [e[2] for e in s]
+        if len(s) != len(parents):
+            raise util.Abort(_("not all revisions were sorted"))
 
         return s
 
     def mapentry(self, src, dst):
         if self.revmapfilefd is None:
             try:
                 self.revmapfilefd = open(self.revmapfile, "a")
             except IOError, (errno, strerror):
                 raise util.Abort("Could not open map file %s: %s, %s\n" % (self.revmapfile, errno, strerror))
         self.map[src] = dst
         self.revmapfilefd.write("%s %s\n" % (src, dst))
         self.revmapfilefd.flush()
 
     def writeauthormap(self):
         authorfile = self.authorfile
         if authorfile:
             self.ui.status('Writing author map file %s\n' % authorfile)
             ofile = open(authorfile, 'w+')
             for author in self.authors:
                 ofile.write("%s=%s\n" % (author, self.authors[author]))
             ofile.close()
 
     def readauthormap(self, authorfile):
         afile = open(authorfile, 'r')
         for line in afile:
             try:
                 srcauthor = line.split('=')[0].strip()
                 dstauthor = line.split('=')[1].strip()
                 if srcauthor in self.authors and dstauthor != self.authors[srcauthor]:
                     self.ui.status(
                         'Overriding mapping for author %s, was %s, will be %s\n'
                         % (srcauthor, self.authors[srcauthor], dstauthor))
                 else:
                     self.ui.debug('Mapping author %s to %s\n'
                                   % (srcauthor, dstauthor))
                 self.authors[srcauthor] = dstauthor
             except IndexError:
                 self.ui.warn(
                     'Ignoring bad line in author file map %s: %s\n'
                     % (authorfile, line))
         afile.close()
 
     def cachecommit(self, rev):
         commit = self.source.getcommit(rev)
         commit.author = self.authors.get(commit.author, commit.author)
         self.commitcache[rev] = commit
         return commit
 
     def copy(self, rev):
         commit = self.commitcache[rev]
         do_copies = hasattr(self.dest, 'copyfile')
         filenames = []
 
         changes = self.source.getchanges(rev)
         if isinstance(changes, basestring):
             if changes == SKIPREV:
                 dest = SKIPREV
             else:
                 dest = self.map[changes]
             self.mapentry(rev, dest)
             return
         files, copies = changes
         pbranches = []
         if commit.parents:
             for prev in commit.parents:
                 if prev not in self.commitcache:
                     self.cachecommit(prev)
                 pbranches.append((self.map[prev],
                                   self.commitcache[prev].branch))
         self.dest.setbranch(commit.branch, pbranches)
         for f, v in files:
             filenames.append(f)
             try:
                 data = self.source.getfile(f, v)
             except IOError, inst:
                 self.dest.delfile(f)
             else:
                 e = self.source.getmode(f, v)
                 self.dest.putfile(f, e, data)
                 if do_copies:
                     if f in copies:
                         copyf = copies[f]
                         # Merely marks that a copy happened.
                         self.dest.copyfile(copyf, f)
 
         parents = [b[0] for b in pbranches]
         newnode = self.dest.putcommit(filenames, parents, commit)
         self.mapentry(rev, newnode)
 
     def convert(self):
         try:
             self.source.before()
             self.dest.before()
             self.source.setrevmap(self.map, self.maporder)
             self.ui.status("scanning source...\n")
             heads = self.source.getheads()
             parents = self.walktree(heads)
             self.ui.status("sorting...\n")
             t = self.toposort(parents)
             num = len(t)
             c = None
 
             self.ui.status("converting...\n")
             for c in t:
                 num -= 1
                 desc = self.commitcache[c].desc
                 if "\n" in desc:
                     desc = desc.splitlines()[0]
                 self.ui.status("%d %s\n" % (num, desc))
                 self.copy(c)
 
             tags = self.source.gettags()
             ctags = {}
             for k in tags:
                 v = tags[k]
                 if self.map.get(v, SKIPREV) != SKIPREV:
                     ctags[k] = self.map[v]
 
             if c and ctags:
                 nrev = self.dest.puttags(ctags)
                 # write another hash correspondence to override the previous
                 # one so we don't end up with extra tag heads
                 if nrev:
                     self.mapentry(c, nrev)
 
             self.writeauthormap()
         finally:
             self.cleanup()
 
     def cleanup(self):
         try:
             self.dest.after()
         finally:
             self.source.after()
         if self.revmapfilefd:
             self.revmapfilefd.close()
 
 def convert(ui, src, dest=None, revmapfile=None, **opts):
     util._encoding = 'UTF-8'
 
     if not dest:
         dest = hg.defaultdest(src) + "-hg"
         ui.status("assuming destination %s\n" % dest)
 
     destc = convertsink(ui, dest, opts.get('dest_type'))
 
     try:
         srcc = convertsource(ui, src, opts.get('source_type'),
                              opts.get('rev'))
     except Exception:
         for path in destc.created:
             shutil.rmtree(path, True)
         raise
 
     fmap = opts.get('filemap')
     if fmap:
         srcc = filemap.filemap_source(ui, srcc, fmap)
         destc.setfilemapmode(True)
 
     if not revmapfile:
         try:
             revmapfile = destc.revmapfile()
         except:
             revmapfile = os.path.join(destc, "map")
 
     c = converter(ui, srcc, destc, revmapfile, opts)
     c.convert()
 
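
Below the patch, a minimal self-contained sketch (hypothetical names, not part of convcmd.py) of the ready-queue scheduling the rewritten toposort introduces: 'actives' holds revisions whose parents have all been emitted, picknext() chooses which eligible revision to emit next, and 'pendings' tracks the parents still blocking each child. In the patch itself, the datesort branch supplies a picknext that returns the oldest eligible revision, while the default branch prefers a child of the previously converted revision so runs of linear history stay together.

def schedule(parents, children, picknext):
    # parents: {rev: [parent revs]}, children: {rev: [child revs]}
    actives = [n for n in parents if not parents[n]]  # roots are ready at once
    pendings = {}
    order = []
    while actives:
        n = picknext(actives)
        actives.remove(n)
        order.append(n)
        for c in children.get(n, []):
            if c not in pendings:
                pendings[c] = list(parents[c])
            pendings[c].remove(n)
            if not pendings[c]:
                # all of c's parents are emitted, c becomes eligible
                actives.insert(0, c)
    return order

# Toy history: a -> b, a -> c, then b and c merge into d.
parents = {'a': [], 'b': ['a'], 'c': ['a'], 'd': ['b', 'c']}
children = {'a': ['b', 'c'], 'b': ['d'], 'c': ['d']}
print(schedule(parents, children, picknext=lambda nodes: nodes[0]))
# -> ['a', 'c', 'b', 'd']: every revision appears after its parents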