##// END OF EJS Templates
splicemap: move parsesplicemap to convcmd.py (issue2084)...
Ben Goswami -
r19119:61f1223a default
parent child Browse files
Show More
@@ -1,460 +1,432 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64, errno, subprocess, os, datetime
8 import base64, errno, subprocess, os, datetime
9 import cPickle as pickle
9 import cPickle as pickle
10 from mercurial import util
10 from mercurial import util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 def encodeargs(args):
15 def encodeargs(args):
16 def encodearg(s):
16 def encodearg(s):
17 lines = base64.encodestring(s)
17 lines = base64.encodestring(s)
18 lines = [l.splitlines()[0] for l in lines]
18 lines = [l.splitlines()[0] for l in lines]
19 return ''.join(lines)
19 return ''.join(lines)
20
20
21 s = pickle.dumps(args)
21 s = pickle.dumps(args)
22 return encodearg(s)
22 return encodearg(s)
23
23
24 def decodeargs(s):
24 def decodeargs(s):
25 s = base64.decodestring(s)
25 s = base64.decodestring(s)
26 return pickle.loads(s)
26 return pickle.loads(s)
27
27
28 class MissingTool(Exception):
28 class MissingTool(Exception):
29 pass
29 pass
30
30
31 def checktool(exe, name=None, abort=True):
31 def checktool(exe, name=None, abort=True):
32 name = name or exe
32 name = name or exe
33 if not util.findexe(exe):
33 if not util.findexe(exe):
34 exc = abort and util.Abort or MissingTool
34 exc = abort and util.Abort or MissingTool
35 raise exc(_('cannot find required "%s" tool') % name)
35 raise exc(_('cannot find required "%s" tool') % name)
36
36
37 class NoRepo(Exception):
37 class NoRepo(Exception):
38 pass
38 pass
39
39
40 SKIPREV = 'SKIP'
40 SKIPREV = 'SKIP'
41
41
42 class commit(object):
42 class commit(object):
43 def __init__(self, author, date, desc, parents, branch=None, rev=None,
43 def __init__(self, author, date, desc, parents, branch=None, rev=None,
44 extra={}, sortkey=None):
44 extra={}, sortkey=None):
45 self.author = author or 'unknown'
45 self.author = author or 'unknown'
46 self.date = date or '0 0'
46 self.date = date or '0 0'
47 self.desc = desc
47 self.desc = desc
48 self.parents = parents
48 self.parents = parents
49 self.branch = branch
49 self.branch = branch
50 self.rev = rev
50 self.rev = rev
51 self.extra = extra
51 self.extra = extra
52 self.sortkey = sortkey
52 self.sortkey = sortkey
53
53
54 class converter_source(object):
54 class converter_source(object):
55 """Conversion source interface"""
55 """Conversion source interface"""
56
56
57 def __init__(self, ui, path=None, rev=None):
57 def __init__(self, ui, path=None, rev=None):
58 """Initialize conversion source (or raise NoRepo("message")
58 """Initialize conversion source (or raise NoRepo("message")
59 exception if path is not a valid repository)"""
59 exception if path is not a valid repository)"""
60 self.ui = ui
60 self.ui = ui
61 self.path = path
61 self.path = path
62 self.rev = rev
62 self.rev = rev
63
63
64 self.encoding = 'utf-8'
64 self.encoding = 'utf-8'
65
65
66 def before(self):
66 def before(self):
67 pass
67 pass
68
68
69 def after(self):
69 def after(self):
70 pass
70 pass
71
71
72 def setrevmap(self, revmap):
72 def setrevmap(self, revmap):
73 """set the map of already-converted revisions"""
73 """set the map of already-converted revisions"""
74 pass
74 pass
75
75
76 def getheads(self):
76 def getheads(self):
77 """Return a list of this repository's heads"""
77 """Return a list of this repository's heads"""
78 raise NotImplementedError
78 raise NotImplementedError
79
79
80 def getfile(self, name, rev):
80 def getfile(self, name, rev):
81 """Return a pair (data, mode) where data is the file content
81 """Return a pair (data, mode) where data is the file content
82 as a string and mode one of '', 'x' or 'l'. rev is the
82 as a string and mode one of '', 'x' or 'l'. rev is the
83 identifier returned by a previous call to getchanges(). Raise
83 identifier returned by a previous call to getchanges(). Raise
84 IOError to indicate that name was deleted in rev.
84 IOError to indicate that name was deleted in rev.
85 """
85 """
86 raise NotImplementedError
86 raise NotImplementedError
87
87
88 def getchanges(self, version):
88 def getchanges(self, version):
89 """Returns a tuple of (files, copies).
89 """Returns a tuple of (files, copies).
90
90
91 files is a sorted list of (filename, id) tuples for all files
91 files is a sorted list of (filename, id) tuples for all files
92 changed between version and its first parent returned by
92 changed between version and its first parent returned by
93 getcommit(). id is the source revision id of the file.
93 getcommit(). id is the source revision id of the file.
94
94
95 copies is a dictionary of dest: source
95 copies is a dictionary of dest: source
96 """
96 """
97 raise NotImplementedError
97 raise NotImplementedError
98
98
99 def getcommit(self, version):
99 def getcommit(self, version):
100 """Return the commit object for version"""
100 """Return the commit object for version"""
101 raise NotImplementedError
101 raise NotImplementedError
102
102
103 def gettags(self):
103 def gettags(self):
104 """Return the tags as a dictionary of name: revision
104 """Return the tags as a dictionary of name: revision
105
105
106 Tag names must be UTF-8 strings.
106 Tag names must be UTF-8 strings.
107 """
107 """
108 raise NotImplementedError
108 raise NotImplementedError
109
109
110 def recode(self, s, encoding=None):
110 def recode(self, s, encoding=None):
111 if not encoding:
111 if not encoding:
112 encoding = self.encoding or 'utf-8'
112 encoding = self.encoding or 'utf-8'
113
113
114 if isinstance(s, unicode):
114 if isinstance(s, unicode):
115 return s.encode("utf-8")
115 return s.encode("utf-8")
116 try:
116 try:
117 return s.decode(encoding).encode("utf-8")
117 return s.decode(encoding).encode("utf-8")
118 except UnicodeError:
118 except UnicodeError:
119 try:
119 try:
120 return s.decode("latin-1").encode("utf-8")
120 return s.decode("latin-1").encode("utf-8")
121 except UnicodeError:
121 except UnicodeError:
122 return s.decode(encoding, "replace").encode("utf-8")
122 return s.decode(encoding, "replace").encode("utf-8")
123
123
124 def getchangedfiles(self, rev, i):
124 def getchangedfiles(self, rev, i):
125 """Return the files changed by rev compared to parent[i].
125 """Return the files changed by rev compared to parent[i].
126
126
127 i is an index selecting one of the parents of rev. The return
127 i is an index selecting one of the parents of rev. The return
128 value should be the list of files that are different in rev and
128 value should be the list of files that are different in rev and
129 this parent.
129 this parent.
130
130
131 If rev has no parents, i is None.
131 If rev has no parents, i is None.
132
132
133 This function is only needed to support --filemap
133 This function is only needed to support --filemap
134 """
134 """
135 raise NotImplementedError
135 raise NotImplementedError
136
136
137 def converted(self, rev, sinkrev):
137 def converted(self, rev, sinkrev):
138 '''Notify the source that a revision has been converted.'''
138 '''Notify the source that a revision has been converted.'''
139 pass
139 pass
140
140
141 def hasnativeorder(self):
141 def hasnativeorder(self):
142 """Return true if this source has a meaningful, native revision
142 """Return true if this source has a meaningful, native revision
143 order. For instance, Mercurial revisions are store sequentially
143 order. For instance, Mercurial revisions are store sequentially
144 while there is no such global ordering with Darcs.
144 while there is no such global ordering with Darcs.
145 """
145 """
146 return False
146 return False
147
147
148 def hasnativeclose(self):
148 def hasnativeclose(self):
149 """Return true if this source has ability to close branch.
149 """Return true if this source has ability to close branch.
150 """
150 """
151 return False
151 return False
152
152
153 def lookuprev(self, rev):
153 def lookuprev(self, rev):
154 """If rev is a meaningful revision reference in source, return
154 """If rev is a meaningful revision reference in source, return
155 the referenced identifier in the same format used by getcommit().
155 the referenced identifier in the same format used by getcommit().
156 return None otherwise.
156 return None otherwise.
157 """
157 """
158 return None
158 return None
159
159
160 def getbookmarks(self):
160 def getbookmarks(self):
161 """Return the bookmarks as a dictionary of name: revision
161 """Return the bookmarks as a dictionary of name: revision
162
162
163 Bookmark names are to be UTF-8 strings.
163 Bookmark names are to be UTF-8 strings.
164 """
164 """
165 return {}
165 return {}
166
166
167 class converter_sink(object):
167 class converter_sink(object):
168 """Conversion sink (target) interface"""
168 """Conversion sink (target) interface"""
169
169
170 def __init__(self, ui, path):
170 def __init__(self, ui, path):
171 """Initialize conversion sink (or raise NoRepo("message")
171 """Initialize conversion sink (or raise NoRepo("message")
172 exception if path is not a valid repository)
172 exception if path is not a valid repository)
173
173
174 created is a list of paths to remove if a fatal error occurs
174 created is a list of paths to remove if a fatal error occurs
175 later"""
175 later"""
176 self.ui = ui
176 self.ui = ui
177 self.path = path
177 self.path = path
178 self.created = []
178 self.created = []
179
179
180 def getheads(self):
180 def getheads(self):
181 """Return a list of this repository's heads"""
181 """Return a list of this repository's heads"""
182 raise NotImplementedError
182 raise NotImplementedError
183
183
184 def revmapfile(self):
184 def revmapfile(self):
185 """Path to a file that will contain lines
185 """Path to a file that will contain lines
186 source_rev_id sink_rev_id
186 source_rev_id sink_rev_id
187 mapping equivalent revision identifiers for each system."""
187 mapping equivalent revision identifiers for each system."""
188 raise NotImplementedError
188 raise NotImplementedError
189
189
190 def authorfile(self):
190 def authorfile(self):
191 """Path to a file that will contain lines
191 """Path to a file that will contain lines
192 srcauthor=dstauthor
192 srcauthor=dstauthor
193 mapping equivalent authors identifiers for each system."""
193 mapping equivalent authors identifiers for each system."""
194 return None
194 return None
195
195
196 def putcommit(self, files, copies, parents, commit, source, revmap):
196 def putcommit(self, files, copies, parents, commit, source, revmap):
197 """Create a revision with all changed files listed in 'files'
197 """Create a revision with all changed files listed in 'files'
198 and having listed parents. 'commit' is a commit object
198 and having listed parents. 'commit' is a commit object
199 containing at a minimum the author, date, and message for this
199 containing at a minimum the author, date, and message for this
200 changeset. 'files' is a list of (path, version) tuples,
200 changeset. 'files' is a list of (path, version) tuples,
201 'copies' is a dictionary mapping destinations to sources,
201 'copies' is a dictionary mapping destinations to sources,
202 'source' is the source repository, and 'revmap' is a mapfile
202 'source' is the source repository, and 'revmap' is a mapfile
203 of source revisions to converted revisions. Only getfile() and
203 of source revisions to converted revisions. Only getfile() and
204 lookuprev() should be called on 'source'.
204 lookuprev() should be called on 'source'.
205
205
206 Note that the sink repository is not told to update itself to
206 Note that the sink repository is not told to update itself to
207 a particular revision (or even what that revision would be)
207 a particular revision (or even what that revision would be)
208 before it receives the file data.
208 before it receives the file data.
209 """
209 """
210 raise NotImplementedError
210 raise NotImplementedError
211
211
212 def puttags(self, tags):
212 def puttags(self, tags):
213 """Put tags into sink.
213 """Put tags into sink.
214
214
215 tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
215 tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
216 Return a pair (tag_revision, tag_parent_revision), or (None, None)
216 Return a pair (tag_revision, tag_parent_revision), or (None, None)
217 if nothing was changed.
217 if nothing was changed.
218 """
218 """
219 raise NotImplementedError
219 raise NotImplementedError
220
220
221 def setbranch(self, branch, pbranches):
221 def setbranch(self, branch, pbranches):
222 """Set the current branch name. Called before the first putcommit
222 """Set the current branch name. Called before the first putcommit
223 on the branch.
223 on the branch.
224 branch: branch name for subsequent commits
224 branch: branch name for subsequent commits
225 pbranches: (converted parent revision, parent branch) tuples"""
225 pbranches: (converted parent revision, parent branch) tuples"""
226 pass
226 pass
227
227
228 def setfilemapmode(self, active):
228 def setfilemapmode(self, active):
229 """Tell the destination that we're using a filemap
229 """Tell the destination that we're using a filemap
230
230
231 Some converter_sources (svn in particular) can claim that a file
231 Some converter_sources (svn in particular) can claim that a file
232 was changed in a revision, even if there was no change. This method
232 was changed in a revision, even if there was no change. This method
233 tells the destination that we're using a filemap and that it should
233 tells the destination that we're using a filemap and that it should
234 filter empty revisions.
234 filter empty revisions.
235 """
235 """
236 pass
236 pass
237
237
238 def before(self):
238 def before(self):
239 pass
239 pass
240
240
241 def after(self):
241 def after(self):
242 pass
242 pass
243
243
244 def putbookmarks(self, bookmarks):
244 def putbookmarks(self, bookmarks):
245 """Put bookmarks into sink.
245 """Put bookmarks into sink.
246
246
247 bookmarks: {bookmarkname: sink_rev_id, ...}
247 bookmarks: {bookmarkname: sink_rev_id, ...}
248 where bookmarkname is an UTF-8 string.
248 where bookmarkname is an UTF-8 string.
249 """
249 """
250 pass
250 pass
251
251
252 def hascommit(self, rev):
252 def hascommit(self, rev):
253 """Return True if the sink contains rev"""
253 """Return True if the sink contains rev"""
254 raise NotImplementedError
254 raise NotImplementedError
255
255
256 class commandline(object):
256 class commandline(object):
257 def __init__(self, ui, command):
257 def __init__(self, ui, command):
258 self.ui = ui
258 self.ui = ui
259 self.command = command
259 self.command = command
260
260
261 def prerun(self):
261 def prerun(self):
262 pass
262 pass
263
263
264 def postrun(self):
264 def postrun(self):
265 pass
265 pass
266
266
267 def _cmdline(self, cmd, *args, **kwargs):
267 def _cmdline(self, cmd, *args, **kwargs):
268 cmdline = [self.command, cmd] + list(args)
268 cmdline = [self.command, cmd] + list(args)
269 for k, v in kwargs.iteritems():
269 for k, v in kwargs.iteritems():
270 if len(k) == 1:
270 if len(k) == 1:
271 cmdline.append('-' + k)
271 cmdline.append('-' + k)
272 else:
272 else:
273 cmdline.append('--' + k.replace('_', '-'))
273 cmdline.append('--' + k.replace('_', '-'))
274 try:
274 try:
275 if len(k) == 1:
275 if len(k) == 1:
276 cmdline.append('' + v)
276 cmdline.append('' + v)
277 else:
277 else:
278 cmdline[-1] += '=' + v
278 cmdline[-1] += '=' + v
279 except TypeError:
279 except TypeError:
280 pass
280 pass
281 cmdline = [util.shellquote(arg) for arg in cmdline]
281 cmdline = [util.shellquote(arg) for arg in cmdline]
282 if not self.ui.debugflag:
282 if not self.ui.debugflag:
283 cmdline += ['2>', os.devnull]
283 cmdline += ['2>', os.devnull]
284 cmdline = ' '.join(cmdline)
284 cmdline = ' '.join(cmdline)
285 return cmdline
285 return cmdline
286
286
287 def _run(self, cmd, *args, **kwargs):
287 def _run(self, cmd, *args, **kwargs):
288 def popen(cmdline):
288 def popen(cmdline):
289 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
289 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
290 close_fds=util.closefds,
290 close_fds=util.closefds,
291 stdout=subprocess.PIPE)
291 stdout=subprocess.PIPE)
292 return p
292 return p
293 return self._dorun(popen, cmd, *args, **kwargs)
293 return self._dorun(popen, cmd, *args, **kwargs)
294
294
295 def _run2(self, cmd, *args, **kwargs):
295 def _run2(self, cmd, *args, **kwargs):
296 return self._dorun(util.popen2, cmd, *args, **kwargs)
296 return self._dorun(util.popen2, cmd, *args, **kwargs)
297
297
298 def _dorun(self, openfunc, cmd, *args, **kwargs):
298 def _dorun(self, openfunc, cmd, *args, **kwargs):
299 cmdline = self._cmdline(cmd, *args, **kwargs)
299 cmdline = self._cmdline(cmd, *args, **kwargs)
300 self.ui.debug('running: %s\n' % (cmdline,))
300 self.ui.debug('running: %s\n' % (cmdline,))
301 self.prerun()
301 self.prerun()
302 try:
302 try:
303 return openfunc(cmdline)
303 return openfunc(cmdline)
304 finally:
304 finally:
305 self.postrun()
305 self.postrun()
306
306
307 def run(self, cmd, *args, **kwargs):
307 def run(self, cmd, *args, **kwargs):
308 p = self._run(cmd, *args, **kwargs)
308 p = self._run(cmd, *args, **kwargs)
309 output = p.communicate()[0]
309 output = p.communicate()[0]
310 self.ui.debug(output)
310 self.ui.debug(output)
311 return output, p.returncode
311 return output, p.returncode
312
312
313 def runlines(self, cmd, *args, **kwargs):
313 def runlines(self, cmd, *args, **kwargs):
314 p = self._run(cmd, *args, **kwargs)
314 p = self._run(cmd, *args, **kwargs)
315 output = p.stdout.readlines()
315 output = p.stdout.readlines()
316 p.wait()
316 p.wait()
317 self.ui.debug(''.join(output))
317 self.ui.debug(''.join(output))
318 return output, p.returncode
318 return output, p.returncode
319
319
320 def checkexit(self, status, output=''):
320 def checkexit(self, status, output=''):
321 if status:
321 if status:
322 if output:
322 if output:
323 self.ui.warn(_('%s error:\n') % self.command)
323 self.ui.warn(_('%s error:\n') % self.command)
324 self.ui.warn(output)
324 self.ui.warn(output)
325 msg = util.explainexit(status)[0]
325 msg = util.explainexit(status)[0]
326 raise util.Abort('%s %s' % (self.command, msg))
326 raise util.Abort('%s %s' % (self.command, msg))
327
327
328 def run0(self, cmd, *args, **kwargs):
328 def run0(self, cmd, *args, **kwargs):
329 output, status = self.run(cmd, *args, **kwargs)
329 output, status = self.run(cmd, *args, **kwargs)
330 self.checkexit(status, output)
330 self.checkexit(status, output)
331 return output
331 return output
332
332
333 def runlines0(self, cmd, *args, **kwargs):
333 def runlines0(self, cmd, *args, **kwargs):
334 output, status = self.runlines(cmd, *args, **kwargs)
334 output, status = self.runlines(cmd, *args, **kwargs)
335 self.checkexit(status, ''.join(output))
335 self.checkexit(status, ''.join(output))
336 return output
336 return output
337
337
338 @propertycache
338 @propertycache
339 def argmax(self):
339 def argmax(self):
340 # POSIX requires at least 4096 bytes for ARG_MAX
340 # POSIX requires at least 4096 bytes for ARG_MAX
341 argmax = 4096
341 argmax = 4096
342 try:
342 try:
343 argmax = os.sysconf("SC_ARG_MAX")
343 argmax = os.sysconf("SC_ARG_MAX")
344 except (AttributeError, ValueError):
344 except (AttributeError, ValueError):
345 pass
345 pass
346
346
347 # Windows shells impose their own limits on command line length,
347 # Windows shells impose their own limits on command line length,
348 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
348 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
349 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
349 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
350 # details about cmd.exe limitations.
350 # details about cmd.exe limitations.
351
351
352 # Since ARG_MAX is for command line _and_ environment, lower our limit
352 # Since ARG_MAX is for command line _and_ environment, lower our limit
353 # (and make happy Windows shells while doing this).
353 # (and make happy Windows shells while doing this).
354 return argmax // 2 - 1
354 return argmax // 2 - 1
355
355
356 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
356 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
357 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
357 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
358 limit = self.argmax - cmdlen
358 limit = self.argmax - cmdlen
359 bytes = 0
359 bytes = 0
360 fl = []
360 fl = []
361 for fn in arglist:
361 for fn in arglist:
362 b = len(fn) + 3
362 b = len(fn) + 3
363 if bytes + b < limit or len(fl) == 0:
363 if bytes + b < limit or len(fl) == 0:
364 fl.append(fn)
364 fl.append(fn)
365 bytes += b
365 bytes += b
366 else:
366 else:
367 yield fl
367 yield fl
368 fl = [fn]
368 fl = [fn]
369 bytes = b
369 bytes = b
370 if fl:
370 if fl:
371 yield fl
371 yield fl
372
372
373 def xargs(self, arglist, cmd, *args, **kwargs):
373 def xargs(self, arglist, cmd, *args, **kwargs):
374 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
374 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
375 self.run0(cmd, *(list(args) + l), **kwargs)
375 self.run0(cmd, *(list(args) + l), **kwargs)
376
376
377 class mapfile(dict):
377 class mapfile(dict):
378 def __init__(self, ui, path):
378 def __init__(self, ui, path):
379 super(mapfile, self).__init__()
379 super(mapfile, self).__init__()
380 self.ui = ui
380 self.ui = ui
381 self.path = path
381 self.path = path
382 self.fp = None
382 self.fp = None
383 self.order = []
383 self.order = []
384 self._read()
384 self._read()
385
385
386 def _read(self):
386 def _read(self):
387 if not self.path:
387 if not self.path:
388 return
388 return
389 try:
389 try:
390 fp = open(self.path, 'r')
390 fp = open(self.path, 'r')
391 except IOError, err:
391 except IOError, err:
392 if err.errno != errno.ENOENT:
392 if err.errno != errno.ENOENT:
393 raise
393 raise
394 return
394 return
395 for i, line in enumerate(fp):
395 for i, line in enumerate(fp):
396 line = line.splitlines()[0].rstrip()
396 line = line.splitlines()[0].rstrip()
397 if not line:
397 if not line:
398 # Ignore blank lines
398 # Ignore blank lines
399 continue
399 continue
400 try:
400 try:
401 key, value = line.rsplit(' ', 1)
401 key, value = line.rsplit(' ', 1)
402 except ValueError:
402 except ValueError:
403 raise util.Abort(
403 raise util.Abort(
404 _('syntax error in %s(%d): key/value pair expected')
404 _('syntax error in %s(%d): key/value pair expected')
405 % (self.path, i + 1))
405 % (self.path, i + 1))
406 if key not in self:
406 if key not in self:
407 self.order.append(key)
407 self.order.append(key)
408 super(mapfile, self).__setitem__(key, value)
408 super(mapfile, self).__setitem__(key, value)
409 fp.close()
409 fp.close()
410
410
411 def __setitem__(self, key, value):
411 def __setitem__(self, key, value):
412 if self.fp is None:
412 if self.fp is None:
413 try:
413 try:
414 self.fp = open(self.path, 'a')
414 self.fp = open(self.path, 'a')
415 except IOError, err:
415 except IOError, err:
416 raise util.Abort(_('could not open map file %r: %s') %
416 raise util.Abort(_('could not open map file %r: %s') %
417 (self.path, err.strerror))
417 (self.path, err.strerror))
418 self.fp.write('%s %s\n' % (key, value))
418 self.fp.write('%s %s\n' % (key, value))
419 self.fp.flush()
419 self.fp.flush()
420 super(mapfile, self).__setitem__(key, value)
420 super(mapfile, self).__setitem__(key, value)
421
421
422 def close(self):
422 def close(self):
423 if self.fp:
423 if self.fp:
424 self.fp.close()
424 self.fp.close()
425 self.fp = None
425 self.fp = None
426
426
427 def parsesplicemap(path):
428 """Parse a splicemap, return a child/parents dictionary."""
429 if not path:
430 return {}
431 m = {}
432 try:
433 fp = open(path, 'r')
434 for i, line in enumerate(fp):
435 line = line.splitlines()[0].rstrip()
436 if not line:
437 # Ignore blank lines
438 continue
439 try:
440 child, parents = line.split(' ', 1)
441 parents = parents.replace(',', ' ').split()
442 except ValueError:
443 raise util.Abort(_('syntax error in %s(%d): child parent1'
444 '[,parent2] expected') % (path, i + 1))
445 pp = []
446 for p in parents:
447 if p not in pp:
448 pp.append(p)
449 m[child] = pp
450 except IOError, e:
451 if e.errno != errno.ENOENT:
452 raise
453 return m
454
455 def makedatetimestamp(t):
427 def makedatetimestamp(t):
456 """Like util.makedate() but for time t instead of current time"""
428 """Like util.makedate() but for time t instead of current time"""
457 delta = (datetime.datetime.utcfromtimestamp(t) -
429 delta = (datetime.datetime.utcfromtimestamp(t) -
458 datetime.datetime.fromtimestamp(t))
430 datetime.datetime.fromtimestamp(t))
459 tz = delta.days * 86400 + delta.seconds
431 tz = delta.days * 86400 + delta.seconds
460 return t, tz
432 return t, tz
@@ -1,482 +1,512 b''
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 from cvs import convert_cvs
9 from cvs import convert_cvs
10 from darcs import darcs_source
10 from darcs import darcs_source
11 from git import convert_git
11 from git import convert_git
12 from hg import mercurial_source, mercurial_sink
12 from hg import mercurial_source, mercurial_sink
13 from subversion import svn_source, svn_sink
13 from subversion import svn_source, svn_sink
14 from monotone import monotone_source
14 from monotone import monotone_source
15 from gnuarch import gnuarch_source
15 from gnuarch import gnuarch_source
16 from bzr import bzr_source
16 from bzr import bzr_source
17 from p4 import p4_source
17 from p4 import p4_source
18 import filemap, common
18 import filemap
19
19
import errno, os, shutil
21 from mercurial import hg, util, encoding
21 from mercurial import hg, util, encoding
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24 orig_encoding = 'ascii'
24 orig_encoding = 'ascii'
25
25
def recode(s):
    """Re-encode s (unicode, or a UTF-8 encoded str) into the encoding
    the terminal originally used, replacing unrepresentable characters."""
    if not isinstance(s, unicode):
        s = s.decode('utf-8')
    return s.encode(orig_encoding, 'replace')
31
31
# Known source repository types, as (name, converter class, default
# sort mode) triples.  The sort mode is used when none is requested on
# the command line.
source_converters = [
    ('cvs', convert_cvs, 'branchsort'),
    ('git', convert_git, 'branchsort'),
    ('svn', svn_source, 'branchsort'),
    ('hg', mercurial_source, 'sourcesort'),
    ('darcs', darcs_source, 'branchsort'),
    ('mtn', monotone_source, 'branchsort'),
    ('gnuarch', gnuarch_source, 'branchsort'),
    ('bzr', bzr_source, 'branchsort'),
    ('p4', p4_source, 'branchsort'),
    ]

# Known destination repository types, as (name, converter class) pairs.
sink_converters = [
    ('hg', mercurial_sink),
    ('svn', svn_sink),
    ]
48
48
49 def convertsource(ui, path, type, rev):
49 def convertsource(ui, path, type, rev):
50 exceptions = []
50 exceptions = []
51 if type and type not in [s[0] for s in source_converters]:
51 if type and type not in [s[0] for s in source_converters]:
52 raise util.Abort(_('%s: invalid source repository type') % type)
52 raise util.Abort(_('%s: invalid source repository type') % type)
53 for name, source, sortmode in source_converters:
53 for name, source, sortmode in source_converters:
54 try:
54 try:
55 if not type or name == type:
55 if not type or name == type:
56 return source(ui, path, rev), sortmode
56 return source(ui, path, rev), sortmode
57 except (NoRepo, MissingTool), inst:
57 except (NoRepo, MissingTool), inst:
58 exceptions.append(inst)
58 exceptions.append(inst)
59 if not ui.quiet:
59 if not ui.quiet:
60 for inst in exceptions:
60 for inst in exceptions:
61 ui.write("%s\n" % inst)
61 ui.write("%s\n" % inst)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
63
63
64 def convertsink(ui, path, type):
64 def convertsink(ui, path, type):
65 if type and type not in [s[0] for s in sink_converters]:
65 if type and type not in [s[0] for s in sink_converters]:
66 raise util.Abort(_('%s: invalid destination repository type') % type)
66 raise util.Abort(_('%s: invalid destination repository type') % type)
67 for name, sink in sink_converters:
67 for name, sink in sink_converters:
68 try:
68 try:
69 if not type or name == type:
69 if not type or name == type:
70 return sink(ui, path)
70 return sink(ui, path)
71 except NoRepo, inst:
71 except NoRepo, inst:
72 ui.note(_("convert: %s\n") % inst)
72 ui.note(_("convert: %s\n") % inst)
73 except MissingTool, inst:
73 except MissingTool, inst:
74 raise util.Abort('%s\n' % inst)
74 raise util.Abort('%s\n' % inst)
75 raise util.Abort(_('%s: unknown repository type') % path)
75 raise util.Abort(_('%s: unknown repository type') % path)
76
76
class progresssource(object):
    """Proxy around a conversion source that reports file-retrieval
    progress on the ui while delegating the actual work."""

    def __init__(self, ui, source, filecount):
        self.ui = ui
        self.source = source
        self.filecount = filecount  # total files expected (progress total)
        self.retrieved = 0          # files fetched so far

    def getfile(self, file, rev):
        """Fetch file@rev from the wrapped source, updating progress."""
        self.retrieved += 1
        self.ui.progress(_('getting files'), self.retrieved,
                         item=file, total=self.filecount)
        return self.source.getfile(file, rev)

    def lookuprev(self, rev):
        """Delegate revision lookup to the wrapped source."""
        return self.source.lookuprev(rev)

    def close(self):
        """Finish the 'getting files' progress topic."""
        self.ui.progress(_('getting files'), None)
95
95
96 class converter(object):
96 class converter(object):
def __init__(self, ui, source, dest, revmapfile, opts):
    """Set up a conversion from source to dest.

    revmapfile persistently records source -> target revision ids so
    interrupted conversions can be resumed; opts carries the command
    line options (authormap, splicemap, branchmap, ...).
    """

    self.source = source
    self.dest = dest
    self.ui = ui
    self.opts = opts
    self.commitcache = {}
    self.authors = {}
    self.authorfile = None

    # Record converted revisions persistently: maps source revision
    # ID to target revision ID (both strings).  (This is how
    # incremental conversions work.)
    self.map = mapfile(ui, revmapfile)

    # Read first the dst author map if any
    authorfile = self.dest.authorfile()
    if authorfile and os.path.exists(authorfile):
        self.readauthormap(authorfile)
    # Extend/Override with new author map if necessary
    if opts.get('authormap'):
        self.readauthormap(opts.get('authormap'))
    self.authorfile = self.dest.authorfile()

    # User-supplied child -> parents overrides, and source branch ->
    # destination branch renames.
    self.splicemap = self.parsesplicemap(opts.get('splicemap'))
    self.branchmap = mapfile(ui, opts.get('branchmap'))
123
123
124
125 def parsesplicemap(self, path):
126 """Parse a splicemap, return a child/parents dictionary."""
127 if not path:
128 return {}
129 m = {}
130 try:
131 fp = open(path, 'r')
132 for i, line in enumerate(fp):
133 line = line.splitlines()[0].rstrip()
134 if not line:
135 # Ignore blank lines
136 continue
137 try:
138 child, parents = line.split(' ', 1)
139 parents = parents.replace(',', ' ').split()
140 except ValueError:
141 raise util.Abort(_('syntax error in %s(%d): child parent1'
142 '[,parent2] expected') % (path, i + 1))
143 pp = []
144 for p in parents:
145 if p not in pp:
146 pp.append(p)
147 m[child] = pp
148 except IOError, e:
149 if e.errno != errno.ENOENT:
150 raise
151 return m
152
153
def walktree(self, heads):
    '''Return a mapping that identifies the uncommitted parents of every
    uncommitted changeset.'''
    # Breadth-first walk from the heads; note this consumes the caller's
    # list in place, exactly like the historical implementation.
    visit = heads
    scanned = set()
    parents = {}
    while visit:
        node = visit.pop(0)
        # Skip nodes already scanned, or already converted in a
        # previous (incremental) run.
        if node in scanned or node in self.map:
            continue
        scanned.add(node)
        self.ui.progress(_('scanning'), len(scanned), unit=_('revisions'))
        commit = self.cachecommit(node)
        parents[node] = []
        for p in commit.parents:
            parents[node].append(p)
            visit.append(p)
    self.ui.progress(_('scanning'), None)
    return parents
144
174
def mergesplicemap(self, parents, splicemap):
    """A splicemap redefines child/parent relationships. Check the
    map contains valid revision identifiers and merge the new
    links in the source graph.

    The 'parents' mapping is updated in place: each spliced child's
    parent list is replaced by the splicemap entry, minus parents
    already present in the destination.
    """
    for c in sorted(splicemap):
        if c not in parents:
            if not self.dest.hascommit(self.map.get(c, c)):
                # Could be in source but not converted during this run
                self.ui.warn(_('splice map revision %s is not being '
                               'converted, ignoring\n') % c)
            continue
        pc = []
        for p in splicemap[c]:
            # We do not have to wait for nodes already in dest.
            if self.dest.hascommit(self.map.get(p, p)):
                continue
            # Parent is not in dest and not being converted, not good
            if p not in parents:
                raise util.Abort(_('unknown splice map parent: %s') % p)
            pc.append(p)
        parents[c] = pc
167
197
def toposort(self, parents, sortmode):
    '''Return an ordering such that every uncommitted changeset is
    preceded by all its uncommitted ancestors.

    sortmode selects the tie-breaking strategy among the eligible
    (all-parents-converted) revisions: branchsort, datesort,
    sourcesort or closesort.
    '''

    def mapchildren(parents):
        """Return a (children, roots) tuple where 'children' maps parent
        revision identifiers to children ones, and 'roots' is the list of
        revisions without parents. 'parents' must be a mapping of revision
        identifier to its parents ones.
        """
        visit = sorted(parents)
        seen = set()
        children = {}
        roots = []

        while visit:
            n = visit.pop(0)
            if n in seen:
                continue
            seen.add(n)
            # Ensure that nodes without parents are present in the
            # 'children' mapping.
            children.setdefault(n, [])
            hasparent = False
            for p in parents[n]:
                if p not in self.map:
                    # Parent not yet converted: it gates n.
                    visit.append(p)
                    hasparent = True
                children.setdefault(p, []).append(n)
            if not hasparent:
                roots.append(n)

        return children, roots

    # Sort functions are supposed to take a list of revisions which
    # can be converted immediately and pick one

    def makebranchsorter():
        """If the previously converted revision has a child in the
        eligible revisions list, pick it. Return the list head
        otherwise. Branch sort attempts to minimize branch
        switching, which is harmful for Mercurial backend
        compression.
        """
        # prev is a one-element list so the closure can rebind it.
        prev = [None]
        def picknext(nodes):
            next = nodes[0]
            for n in nodes:
                if prev[0] in parents[n]:
                    next = n
                    break
            prev[0] = next
            return next
        return picknext

    def makesourcesorter():
        """Source specific sort."""
        keyfn = lambda n: self.commitcache[n].sortkey
        def picknext(nodes):
            return sorted(nodes, key=keyfn)[0]
        return picknext

    def makeclosesorter():
        """Close order sort."""
        # Revisions carrying a 'close' extra sort first.
        keyfn = lambda n: ('close' not in self.commitcache[n].extra,
                           self.commitcache[n].sortkey)
        def picknext(nodes):
            return sorted(nodes, key=keyfn)[0]
        return picknext

    def makedatesorter():
        """Sort revisions by date."""
        # Memoize parsed dates; parsing is repeated per pick otherwise.
        dates = {}
        def getdate(n):
            if n not in dates:
                dates[n] = util.parsedate(self.commitcache[n].date)
            return dates[n]

        def picknext(nodes):
            return min([(getdate(n), n) for n in nodes])[1]

        return picknext

    if sortmode == 'branchsort':
        picknext = makebranchsorter()
    elif sortmode == 'datesort':
        picknext = makedatesorter()
    elif sortmode == 'sourcesort':
        picknext = makesourcesorter()
    elif sortmode == 'closesort':
        picknext = makeclosesorter()
    else:
        raise util.Abort(_('unknown sort mode: %s') % sortmode)

    children, actives = mapchildren(parents)

    # Kahn-style topological sort: 'actives' holds revisions whose
    # unconverted parents have all been emitted; 'pendings' maps a
    # revision to the parents it is still waiting for.
    s = []
    pendings = {}
    while actives:
        n = picknext(actives)
        actives.remove(n)
        s.append(n)

        # Update dependents list
        for c in children.get(n, []):
            if c not in pendings:
                pendings[c] = [p for p in parents[c] if p not in self.map]
            try:
                pendings[c].remove(n)
            except ValueError:
                raise util.Abort(_('cycle detected between %s and %s')
                                 % (recode(c), recode(n)))
            if not pendings[c]:
                # Parents are converted, node is eligible
                actives.insert(0, c)
                pendings[c] = None

    if len(s) != len(parents):
        raise util.Abort(_("not all revisions were sorted"))

    return s
289
319
def writeauthormap(self):
    """Persist the in-memory author map to the sink's author file.

    One 'src=dst' line per entry.  No-op when the sink declares no
    author file.
    """
    authorfile = self.authorfile
    if not authorfile:
        return
    self.ui.status(_('writing author map file %s\n') % authorfile)
    ofile = open(authorfile, 'w+')
    try:
        for author in self.authors:
            ofile.write("%s=%s\n" % (author, self.authors[author]))
    finally:
        # Close even if a write fails (previously leaked on error).
        ofile.close()
298
328
def readauthormap(self, authorfile):
    """Merge author mappings from authorfile into self.authors.

    Each non-comment line has the form 'srcauthor=dstauthor'.
    Malformed lines are warned about and skipped.  Existing entries
    are overridden, with a status note showing the previous value.
    """
    afile = open(authorfile, 'r')
    try:
        for line in afile:

            line = line.strip()
            if not line or line.startswith('#'):
                continue

            try:
                srcauthor, dstauthor = line.split('=', 1)
            except ValueError:
                msg = _('ignoring bad line in author map file %s: %s\n')
                self.ui.warn(msg % (authorfile, line.rstrip()))
                continue

            srcauthor = srcauthor.strip()
            dstauthor = dstauthor.strip()
            if self.authors.get(srcauthor) in (None, dstauthor):
                msg = _('mapping author %s to %s\n')
                self.ui.debug(msg % (srcauthor, dstauthor))
                self.authors[srcauthor] = dstauthor
                continue

            m = _('overriding mapping for author %s, was %s, will be %s\n')
            self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
            # Actually apply the override announced above; previously the
            # message was printed but the old mapping was kept.
            self.authors[srcauthor] = dstauthor
    finally:
        # Close even on error paths (previously leaked on exception).
        afile.close()
326
356
def cachecommit(self, rev):
    """Fetch commit rev from the source, apply the author and branch
    renames, cache it in self.commitcache and return it."""
    commit = self.source.getcommit(rev)
    commit.author = self.authors.get(commit.author, commit.author)
    commit.branch = self.branchmap.get(commit.branch, commit.branch)
    self.commitcache[rev] = commit
    return commit
333
363
def copy(self, rev):
    """Convert a single cached revision and record its new identity
    in self.map.

    If the source reports the revision as a plain string instead of a
    (files, copies) pair, it is an alias: SKIPREV, or the id of
    another (already converted) revision.
    """
    commit = self.commitcache[rev]

    changes = self.source.getchanges(rev)
    if isinstance(changes, basestring):
        if changes == SKIPREV:
            dest = SKIPREV
        else:
            dest = self.map[changes]
        self.map[rev] = dest
        return
    files, copies = changes
    pbranches = []
    if commit.parents:
        for prev in commit.parents:
            if prev not in self.commitcache:
                self.cachecommit(prev)
            pbranches.append((self.map[prev],
                              self.commitcache[prev].branch))
    self.dest.setbranch(commit.branch, pbranches)
    try:
        # Splicemap overrides the natural parents when present.
        parents = self.splicemap[rev]
        self.ui.status(_('spliced in %s as parents of %s\n') %
                       (parents, rev))
        parents = [self.map.get(p, p) for p in parents]
    except KeyError:
        parents = [b[0] for b in pbranches]
    source = progresssource(self.ui, self.source, len(files))
    newnode = self.dest.putcommit(files, copies, parents, commit,
                                  source, self.map)
    source.close()
    self.source.converted(rev, newnode)
    self.map[rev] = newnode
367
397
def convert(self, sortmode):
    """Run the full conversion pipeline: scan the source, apply the
    splicemap, topologically sort, copy every revision, then convert
    tags and bookmarks and write the author map.  Cleanup always runs,
    even on failure."""
    try:
        self.source.before()
        self.dest.before()
        self.source.setrevmap(self.map)
        self.ui.status(_("scanning source...\n"))
        heads = self.source.getheads()
        parents = self.walktree(heads)
        self.mergesplicemap(parents, self.splicemap)
        self.ui.status(_("sorting...\n"))
        t = self.toposort(parents, sortmode)
        num = len(t)
        # c keeps the last converted revision; it doubles as a "did we
        # convert anything" flag for the tags/bookmarks phases below.
        c = None

        self.ui.status(_("converting...\n"))
        for i, c in enumerate(t):
            num -= 1
            desc = self.commitcache[c].desc
            if "\n" in desc:
                desc = desc.splitlines()[0]
            # convert log message to local encoding without using
            # tolocal() because the encoding.encoding convert()
            # uses is 'utf-8'
            self.ui.status("%d %s\n" % (num, recode(desc)))
            self.ui.note(_("source: %s\n") % recode(c))
            self.ui.progress(_('converting'), i, unit=_('revisions'),
                             total=len(t))
            self.copy(c)
        self.ui.progress(_('converting'), None)

        # Convert tags whose target revision was actually converted.
        tags = self.source.gettags()
        ctags = {}
        for k in tags:
            v = tags[k]
            if self.map.get(v, SKIPREV) != SKIPREV:
                ctags[k] = self.map[v]

        if c and ctags:
            nrev, tagsparent = self.dest.puttags(ctags)
            if nrev and tagsparent:
                # write another hash correspondence to override the previous
                # one so we don't end up with extra tag heads
                tagsparents = [e for e in self.map.iteritems()
                               if e[1] == tagsparent]
                if tagsparents:
                    self.map[tagsparents[0][0]] = nrev

        # Same filtering for bookmarks.
        bookmarks = self.source.getbookmarks()
        cbookmarks = {}
        for k in bookmarks:
            v = bookmarks[k]
            if self.map.get(v, SKIPREV) != SKIPREV:
                cbookmarks[k] = self.map[v]

        if c and cbookmarks:
            self.dest.putbookmarks(cbookmarks)

        self.writeauthormap()
    finally:
        self.cleanup()
428
458
def cleanup(self):
    """Run the sink's and source's teardown hooks (the source's runs
    even if the sink's raises), then persist and close the revision
    map."""
    try:
        self.dest.after()
    finally:
        self.source.after()
    self.map.close()
435
465
def convert(ui, src, dest=None, revmapfile=None, **opts):
    """Convert repository src into dest (created as src + '-hg' when
    not given), honouring the command line options in opts.

    Temporarily switches the global encoding to UTF-8 for the duration
    of the conversion; removes a freshly created destination if the
    source cannot be opened.
    """
    global orig_encoding
    orig_encoding = encoding.encoding
    encoding.encoding = 'UTF-8'

    # support --authors as an alias for --authormap
    if not opts.get('authormap'):
        opts['authormap'] = opts.get('authors')

    if not dest:
        dest = hg.defaultdest(src) + "-hg"
        ui.status(_("assuming destination %s\n") % dest)

    destc = convertsink(ui, dest, opts.get('dest_type'))

    try:
        srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
                                          opts.get('rev'))
    except Exception:
        for path in destc.created:
            shutil.rmtree(path, True)
        raise

    sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
    sortmode = [m for m in sortmodes if opts.get(m)]
    if len(sortmode) > 1:
        raise util.Abort(_('more than one sort mode specified'))
    sortmode = sortmode and sortmode[0] or defaultsort
    if sortmode == 'sourcesort' and not srcc.hasnativeorder():
        raise util.Abort(_('--sourcesort is not supported by this data source'))
    if sortmode == 'closesort' and not srcc.hasnativeclose():
        raise util.Abort(_('--closesort is not supported by this data source'))

    fmap = opts.get('filemap')
    if fmap:
        srcc = filemap.filemap_source(ui, srcc, fmap)
        destc.setfilemapmode(True)

    if not revmapfile:
        try:
            revmapfile = destc.revmapfile()
        except Exception:
            # Fall back to a 'map' file inside the destination path.
            # Joining the sink object itself (os.path.join(destc, ...))
            # was a bug: destc is not a path string.
            revmapfile = os.path.join(dest, "map")

    c = converter(ui, srcc, destc, revmapfile, opts)
    c.convert(sortmode)
482
512
General Comments 0
You need to be logged in to leave comments. Login now