##// END OF EJS Templates
convert: turn splicemap into a simple dictionary...
Patrick Mezard -
r16105:ebaa0aa7 stable
parent child Browse files
Show More
@@ -1,409 +1,431 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64, errno
8 import base64, errno
9 import os
9 import os
10 import cPickle as pickle
10 import cPickle as pickle
11 from mercurial import util
11 from mercurial import util
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 propertycache = util.propertycache
14 propertycache = util.propertycache
15
15
def encodeargs(args):
    """Pickle 'args' and return it as a single-line base64 string.

    The result contains no newlines, so it can be passed safely on a
    command line or through a line-oriented pipe protocol.
    """
    def encodearg(s):
        # b64encode produces exactly what the old code built by calling
        # encodestring() and then stripping the inserted newlines one
        # character at a time: the same alphabet, with no line wrapping.
        return base64.b64encode(s)

    s = pickle.dumps(args)
    return encodearg(s)
24
24
def decodeargs(s):
    """Reverse encodeargs(): decode base64 and unpickle the arguments.

    NOTE(review): pickle is only safe because 's' comes from our own
    encodeargs() over a trusted channel; never feed it external data.
    """
    # b64decode is the non-deprecated spelling of decodestring(); both
    # ignore embedded newlines before decoding.
    return pickle.loads(base64.b64decode(s))
28
28
class MissingTool(Exception):
    """Raised when a required external conversion tool is unavailable."""
31
31
def checktool(exe, name=None, abort=True):
    """Verify that the external program 'exe' can be found.

    name  - user-facing tool name for the error message (defaults to exe)
    abort - if True, raise util.Abort on failure; otherwise raise
            MissingTool so callers can degrade gracefully
    """
    name = name or exe
    if not util.findexe(exe):
        # Explicit conditional instead of the old 'a and b or c' idiom,
        # which silently misbehaves if the middle operand is ever falsy.
        if abort:
            exc = util.Abort
        else:
            exc = MissingTool
        raise exc(_('cannot find required "%s" tool') % name)
37
37
class NoRepo(Exception):
    """Raised when a path does not look like a usable repository."""
40
40
41 SKIPREV = 'SKIP'
41 SKIPREV = 'SKIP'
42
42
class commit(object):
    """Container for one source revision's metadata.

    author/date fall back to 'unknown' / '0 0' when the source cannot
    provide them; 'parents' holds source revision identifiers.
    """
    def __init__(self, author, date, desc, parents, branch=None, rev=None,
                 extra=None, sortkey=None):
        self.author = author or 'unknown'
        self.date = date or '0 0'
        self.desc = desc
        self.parents = parents
        self.branch = branch
        self.rev = rev
        # Use a fresh dict per instance: the old 'extra={}' default was a
        # single shared mutable object, so mutating one commit's extra
        # leaked into every other commit created without an explicit value.
        if extra is None:
            extra = {}
        self.extra = extra
        self.sortkey = sortkey
54
54
class converter_source(object):
    """Conversion source interface"""

    def __init__(self, ui, path=None, rev=None):
        """Initialize conversion source (or raise NoRepo("message")
        exception if path is not a valid repository)"""
        self.ui = ui
        self.path = path
        self.rev = rev

        # All metadata handed to the sink is expected in UTF-8; recode()
        # converts from this encoding.
        self.encoding = 'utf-8'

    def before(self):
        # Hook run before the conversion starts.
        pass

    def after(self):
        # Hook run after the conversion finishes.
        pass

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""
        pass

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError()

    def getfile(self, name, rev):
        """Return a pair (data, mode) where data is the file content
        as a string and mode one of '', 'x' or 'l'. rev is the
        identifier returned by a previous call to getchanges(). Raise
        IOError to indicate that name was deleted in rev.
        """
        raise NotImplementedError()

    def getchanges(self, version):
        """Returns a tuple of (files, copies).

        files is a sorted list of (filename, id) tuples for all files
        changed between version and its first parent returned by
        getcommit(). id is the source revision id of the file.

        copies is a dictionary of dest: source
        """
        raise NotImplementedError()

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError()

    def gettags(self):
        """Return the tags as a dictionary of name: revision

        Tag names must be UTF-8 strings.
        """
        raise NotImplementedError()

    def recode(self, s, encoding=None):
        """Return 's' re-encoded as UTF-8, trying 'encoding' (or the
        source encoding) first and falling back to latin-1, then to a
        lossy decode with replacement characters."""
        if not encoding:
            encoding = self.encoding or 'utf-8'

        if isinstance(s, unicode):
            return s.encode("utf-8")
        # Narrowed from bare 'except:' clauses, which also swallowed
        # KeyboardInterrupt/SystemExit. UnicodeError covers undecodable
        # input; LookupError covers unknown encoding names.
        try:
            return s.decode(encoding).encode("utf-8")
        except (UnicodeError, LookupError):
            try:
                return s.decode("latin-1").encode("utf-8")
            except (UnicodeError, LookupError):
                return s.decode(encoding, "replace").encode("utf-8")

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev. The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError()

    def converted(self, rev, sinkrev):
        '''Notify the source that a revision has been converted.'''
        pass

    def hasnativeorder(self):
        """Return true if this source has a meaningful, native revision
        order. For instance, Mercurial revisions are store sequentially
        while there is no such global ordering with Darcs.
        """
        return False

    def lookuprev(self, rev):
        """If rev is a meaningful revision reference in source, return
        the referenced identifier in the same format used by getcommit().
        return None otherwise.
        """
        return None

    def getbookmarks(self):
        """Return the bookmarks as a dictionary of name: revision

        Bookmark names are to be UTF-8 strings.
        """
        return {}
162
162
class converter_sink(object):
    """Conversion sink (target) interface"""

    def __init__(self, ui, path):
        """Initialize conversion sink (or raise NoRepo("message")
        exception if path is not a valid repository)

        created is a list of paths to remove if a fatal error occurs
        later"""
        self.ui = ui
        self.path = path
        self.created = []

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError()

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError()

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent authors identifiers for each system."""
        return None

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object
        containing at a minimum the author, date, and message for this
        changeset. 'files' is a list of (path, version) tuples,
        'copies' is a dictionary mapping destinations to sources,
        'source' is the source repository, and 'revmap' is a mapfile
        of source revisions to converted revisions. Only getfile() and
        lookuprev() should be called on 'source'.

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError()

    def puttags(self, tags):
        """Put tags into sink.

        tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
        Return a pair (tag_revision, tag_parent_revision), or (None, None)
        if nothing was changed.
        """
        raise NotImplementedError()

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.
        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""
        pass

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change. This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """
        pass

    def before(self):
        # Hook run before the conversion starts.
        pass

    def after(self):
        # Hook run after the conversion finishes.
        pass

    def putbookmarks(self, bookmarks):
        """Put bookmarks into sink.

        bookmarks: {bookmarkname: sink_rev_id, ...}
        where bookmarkname is an UTF-8 string.
        """
        pass
247
247
class commandline(object):
    """Helper to build and run command lines for an external tool."""

    def __init__(self, ui, command):
        self.ui = ui
        self.command = command

    def prerun(self):
        # Hook called before every command execution.
        pass

    def postrun(self):
        # Hook called after every command execution.
        pass

    def _cmdline(self, cmd, closestdin, *args, **kwargs):
        """Build a shell-quoted command line string.

        Keyword arguments become '-s'/'--long' options. A non-string
        value (True, None, ...) marks a flag taking no argument.
        """
        cmdline = [self.command, cmd] + list(args)
        for k, v in kwargs.iteritems():
            if len(k) == 1:
                cmdline.append('-' + k)
            else:
                cmdline.append('--' + k.replace('_', '-'))
            try:
                # The string concatenation is deliberate: it raises
                # TypeError when v is not a string (flag-only option),
                # in which case no value is appended.
                if len(k) == 1:
                    cmdline.append('' + v)
                else:
                    cmdline[-1] += '=' + v
            except TypeError:
                pass
        cmdline = [util.shellquote(arg) for arg in cmdline]
        if not self.ui.debugflag:
            # Hide the tool's stderr unless the user asked for debugging.
            cmdline += ['2>', util.nulldev]
        if closestdin:
            cmdline += ['<', util.nulldev]
        cmdline = ' '.join(cmdline)
        return cmdline

    def _run(self, cmd, *args, **kwargs):
        return self._dorun(util.popen, cmd, True, *args, **kwargs)

    def _run2(self, cmd, *args, **kwargs):
        return self._dorun(util.popen2, cmd, False, *args, **kwargs)

    def _dorun(self, openfunc, cmd, closestdin, *args, **kwargs):
        cmdline = self._cmdline(cmd, closestdin, *args, **kwargs)
        self.ui.debug('running: %s\n' % (cmdline,))
        self.prerun()
        try:
            return openfunc(cmdline)
        finally:
            self.postrun()

    def run(self, cmd, *args, **kwargs):
        """Run cmd and return (output, exit status)."""
        fp = self._run(cmd, *args, **kwargs)
        output = fp.read()
        self.ui.debug(output)
        return output, fp.close()

    def runlines(self, cmd, *args, **kwargs):
        """Run cmd and return (list of output lines, exit status)."""
        fp = self._run(cmd, *args, **kwargs)
        output = fp.readlines()
        self.ui.debug(''.join(output))
        return output, fp.close()

    def checkexit(self, status, output=''):
        """Abort, echoing the command's output, if status is non-zero."""
        if status:
            if output:
                self.ui.warn(_('%s error:\n') % self.command)
                self.ui.warn(output)
            msg = util.explainexit(status)[0]
            raise util.Abort('%s %s' % (self.command, msg))

    def run0(self, cmd, *args, **kwargs):
        """Like run(), but abort on a non-zero exit status."""
        output, status = self.run(cmd, *args, **kwargs)
        self.checkexit(status, output)
        return output

    def runlines0(self, cmd, *args, **kwargs):
        """Like runlines(), but abort on a non-zero exit status."""
        output, status = self.runlines(cmd, *args, **kwargs)
        self.checkexit(status, ''.join(output))
        return output

    @propertycache
    def argmax(self):
        # POSIX requires at least 4096 bytes for ARG_MAX
        argmax = 4096
        try:
            argmax = os.sysconf("SC_ARG_MAX")
        except (AttributeError, ValueError, OSError):
            # Narrowed from a bare 'except:': sysconf may be missing
            # (e.g. Windows), reject the name, or fail at the OS level.
            pass

        # Windows shells impose their own limits on command line length,
        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
        # details about cmd.exe limitations.

        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).
        return argmax // 2 - 1

    def limit_arglist(self, arglist, cmd, closestdin, *args, **kwargs):
        """Yield successive chunks of arglist, each small enough that the
        command plus the chunk stays within the system argument limit."""
        cmdlen = len(self._cmdline(cmd, closestdin, *args, **kwargs))
        limit = self.argmax - cmdlen
        # 'numbytes' instead of the original 'bytes', which shadowed the
        # builtin of the same name.
        numbytes = 0
        fl = []
        for fn in arglist:
            b = len(fn) + 3
            if numbytes + b < limit or len(fl) == 0:
                fl.append(fn)
                numbytes += b
            else:
                yield fl
                fl = [fn]
                numbytes = b
        if fl:
            yield fl

    def xargs(self, arglist, cmd, *args, **kwargs):
        """Run cmd over arglist, splitting it into several invocations
        when a single one would exceed the argument-length limit."""
        for l in self.limit_arglist(arglist, cmd, True, *args, **kwargs):
            self.run0(cmd, *(list(args) + l), **kwargs)
364
364
class mapfile(dict):
    """A persistent dictionary backed by a 'key value' text file.

    Assignments are appended to the file immediately, so the mapping
    survives an interrupted conversion. A falsy path gives a purely
    in-memory mapping.
    """
    def __init__(self, ui, path):
        super(mapfile, self).__init__()
        self.ui = ui
        self.path = path
        # Append handle, opened lazily on first write.
        self.fp = None
        # Keys in first-seen order.
        self.order = []
        self._read()

    def _read(self):
        if not self.path:
            return
        try:
            fp = open(self.path, 'r')
        except IOError as err:
            # 'as' form instead of py2-only 'except E, v'. A missing map
            # file simply means an empty mapping.
            if err.errno != errno.ENOENT:
                raise
            return
        try:
            for i, line in enumerate(fp):
                try:
                    # rsplit so keys may themselves contain spaces.
                    key, value = line.splitlines()[0].rstrip().rsplit(' ', 1)
                except ValueError:
                    raise util.Abort(
                        _('syntax error in %s(%d): key/value pair expected')
                        % (self.path, i + 1))
                if key not in self:
                    self.order.append(key)
                super(mapfile, self).__setitem__(key, value)
        finally:
            # Close even when aborting on a syntax error; the original
            # leaked the handle on that path.
            fp.close()

    def __setitem__(self, key, value):
        if self.fp is None:
            try:
                self.fp = open(self.path, 'a')
            except IOError as err:
                raise util.Abort(_('could not open map file %r: %s') %
                                 (self.path, err.strerror))
        self.fp.write('%s %s\n' % (key, value))
        self.fp.flush()
        super(mapfile, self).__setitem__(key, value)

    def close(self):
        if self.fp:
            self.fp.close()
            self.fp = None
410
def parsesplicemap(path):
    """Parse a splicemap, return a child/parents dictionary.

    Each line has the form 'child parent1[,parent2]'. Duplicate parents
    are dropped while preserving their order. An empty path or a missing
    file yields an empty dictionary.
    """
    m = {}
    # Guard against a falsy path (command-line options default to ''),
    # matching mapfile's behaviour instead of relying on open() failing.
    if not path:
        return m
    try:
        fp = open(path, 'r')
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        return m
    try:
        for i, line in enumerate(fp):
            try:
                child, parents = line.splitlines()[0].rstrip().rsplit(' ', 1)
                parents = parents.replace(',', ' ').split()
            except ValueError:
                raise util.Abort(_('syntax error in %s(%d): child parent1'
                                   '[,parent2] expected') % (path, i + 1))
            # Deduplicate parents, keeping their original order.
            pp = []
            for p in parents:
                if p not in pp:
                    pp.append(p)
            m[child] = pp
    finally:
        # The original never closed the file; make sure we always do.
        fp.close()
    return m
@@ -1,446 +1,446 b''
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 from cvs import convert_cvs
9 from cvs import convert_cvs
10 from darcs import darcs_source
10 from darcs import darcs_source
11 from git import convert_git
11 from git import convert_git
12 from hg import mercurial_source, mercurial_sink
12 from hg import mercurial_source, mercurial_sink
13 from subversion import svn_source, svn_sink
13 from subversion import svn_source, svn_sink
14 from monotone import monotone_source
14 from monotone import monotone_source
15 from gnuarch import gnuarch_source
15 from gnuarch import gnuarch_source
16 from bzr import bzr_source
16 from bzr import bzr_source
17 from p4 import p4_source
17 from p4 import p4_source
18 import filemap
18 import filemap, common
19
19
20 import os, shutil
20 import os, shutil
21 from mercurial import hg, util, encoding
21 from mercurial import hg, util, encoding
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24 orig_encoding = 'ascii'
24 orig_encoding = 'ascii'
25
25
def recode(s):
    """Re-encode 's' into the original local encoding, substituting
    replacement characters for anything unrepresentable."""
    if isinstance(s, unicode):
        return s.encode(orig_encoding, 'replace')
    # Byte strings are assumed UTF-8 — TODO confirm against callers.
    return s.decode('utf-8').encode(orig_encoding, 'replace')
31
31
32 source_converters = [
32 source_converters = [
33 ('cvs', convert_cvs, 'branchsort'),
33 ('cvs', convert_cvs, 'branchsort'),
34 ('git', convert_git, 'branchsort'),
34 ('git', convert_git, 'branchsort'),
35 ('svn', svn_source, 'branchsort'),
35 ('svn', svn_source, 'branchsort'),
36 ('hg', mercurial_source, 'sourcesort'),
36 ('hg', mercurial_source, 'sourcesort'),
37 ('darcs', darcs_source, 'branchsort'),
37 ('darcs', darcs_source, 'branchsort'),
38 ('mtn', monotone_source, 'branchsort'),
38 ('mtn', monotone_source, 'branchsort'),
39 ('gnuarch', gnuarch_source, 'branchsort'),
39 ('gnuarch', gnuarch_source, 'branchsort'),
40 ('bzr', bzr_source, 'branchsort'),
40 ('bzr', bzr_source, 'branchsort'),
41 ('p4', p4_source, 'branchsort'),
41 ('p4', p4_source, 'branchsort'),
42 ]
42 ]
43
43
44 sink_converters = [
44 sink_converters = [
45 ('hg', mercurial_sink),
45 ('hg', mercurial_sink),
46 ('svn', svn_sink),
46 ('svn', svn_sink),
47 ]
47 ]
48
48
49 def convertsource(ui, path, type, rev):
49 def convertsource(ui, path, type, rev):
50 exceptions = []
50 exceptions = []
51 if type and type not in [s[0] for s in source_converters]:
51 if type and type not in [s[0] for s in source_converters]:
52 raise util.Abort(_('%s: invalid source repository type') % type)
52 raise util.Abort(_('%s: invalid source repository type') % type)
53 for name, source, sortmode in source_converters:
53 for name, source, sortmode in source_converters:
54 try:
54 try:
55 if not type or name == type:
55 if not type or name == type:
56 return source(ui, path, rev), sortmode
56 return source(ui, path, rev), sortmode
57 except (NoRepo, MissingTool), inst:
57 except (NoRepo, MissingTool), inst:
58 exceptions.append(inst)
58 exceptions.append(inst)
59 if not ui.quiet:
59 if not ui.quiet:
60 for inst in exceptions:
60 for inst in exceptions:
61 ui.write("%s\n" % inst)
61 ui.write("%s\n" % inst)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
63
63
64 def convertsink(ui, path, type):
64 def convertsink(ui, path, type):
65 if type and type not in [s[0] for s in sink_converters]:
65 if type and type not in [s[0] for s in sink_converters]:
66 raise util.Abort(_('%s: invalid destination repository type') % type)
66 raise util.Abort(_('%s: invalid destination repository type') % type)
67 for name, sink in sink_converters:
67 for name, sink in sink_converters:
68 try:
68 try:
69 if not type or name == type:
69 if not type or name == type:
70 return sink(ui, path)
70 return sink(ui, path)
71 except NoRepo, inst:
71 except NoRepo, inst:
72 ui.note(_("convert: %s\n") % inst)
72 ui.note(_("convert: %s\n") % inst)
73 except MissingTool, inst:
73 except MissingTool, inst:
74 raise util.Abort('%s\n' % inst)
74 raise util.Abort('%s\n' % inst)
75 raise util.Abort(_('%s: unknown repository type') % path)
75 raise util.Abort(_('%s: unknown repository type') % path)
76
76
class progresssource(object):
    """Wrap a converter source and report file-retrieval progress
    through the ui."""

    def __init__(self, ui, source, filecount):
        self.ui = ui
        self.source = source
        # Total files expected for the current revision.
        self.filecount = filecount
        # Files fetched so far.
        self.retrieved = 0

    def getfile(self, file, rev):
        """Fetch a file from the wrapped source, advancing the bar."""
        self.retrieved += 1
        self.ui.progress(_('getting files'), self.retrieved,
                         item=file, total=self.filecount)
        return self.source.getfile(file, rev)

    def lookuprev(self, rev):
        """Delegate revision lookup to the wrapped source."""
        return self.source.lookuprev(rev)

    def close(self):
        """Tear down the progress display."""
        self.ui.progress(_('getting files'), None)
95
95
96 class converter(object):
96 class converter(object):
97 def __init__(self, ui, source, dest, revmapfile, opts):
97 def __init__(self, ui, source, dest, revmapfile, opts):
98
98
99 self.source = source
99 self.source = source
100 self.dest = dest
100 self.dest = dest
101 self.ui = ui
101 self.ui = ui
102 self.opts = opts
102 self.opts = opts
103 self.commitcache = {}
103 self.commitcache = {}
104 self.authors = {}
104 self.authors = {}
105 self.authorfile = None
105 self.authorfile = None
106
106
107 # Record converted revisions persistently: maps source revision
107 # Record converted revisions persistently: maps source revision
108 # ID to target revision ID (both strings). (This is how
108 # ID to target revision ID (both strings). (This is how
109 # incremental conversions work.)
109 # incremental conversions work.)
110 self.map = mapfile(ui, revmapfile)
110 self.map = mapfile(ui, revmapfile)
111
111
112 # Read first the dst author map if any
112 # Read first the dst author map if any
113 authorfile = self.dest.authorfile()
113 authorfile = self.dest.authorfile()
114 if authorfile and os.path.exists(authorfile):
114 if authorfile and os.path.exists(authorfile):
115 self.readauthormap(authorfile)
115 self.readauthormap(authorfile)
116 # Extend/Override with new author map if necessary
116 # Extend/Override with new author map if necessary
117 if opts.get('authormap'):
117 if opts.get('authormap'):
118 self.readauthormap(opts.get('authormap'))
118 self.readauthormap(opts.get('authormap'))
119 self.authorfile = self.dest.authorfile()
119 self.authorfile = self.dest.authorfile()
120
120
121 self.splicemap = mapfile(ui, opts.get('splicemap'))
121 self.splicemap = common.parsesplicemap(opts.get('splicemap'))
122 self.branchmap = mapfile(ui, opts.get('branchmap'))
122 self.branchmap = mapfile(ui, opts.get('branchmap'))
123
123
124 def walktree(self, heads):
124 def walktree(self, heads):
125 '''Return a mapping that identifies the uncommitted parents of every
125 '''Return a mapping that identifies the uncommitted parents of every
126 uncommitted changeset.'''
126 uncommitted changeset.'''
127 visit = heads
127 visit = heads
128 known = set()
128 known = set()
129 parents = {}
129 parents = {}
130 while visit:
130 while visit:
131 n = visit.pop(0)
131 n = visit.pop(0)
132 if n in known or n in self.map:
132 if n in known or n in self.map:
133 continue
133 continue
134 known.add(n)
134 known.add(n)
135 self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
135 self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
136 commit = self.cachecommit(n)
136 commit = self.cachecommit(n)
137 parents[n] = []
137 parents[n] = []
138 for p in commit.parents:
138 for p in commit.parents:
139 parents[n].append(p)
139 parents[n].append(p)
140 visit.append(p)
140 visit.append(p)
141 self.ui.progress(_('scanning'), None)
141 self.ui.progress(_('scanning'), None)
142
142
143 return parents
143 return parents
144
144
145 def toposort(self, parents, sortmode):
145 def toposort(self, parents, sortmode):
146 '''Return an ordering such that every uncommitted changeset is
146 '''Return an ordering such that every uncommitted changeset is
147 preceeded by all its uncommitted ancestors.'''
147 preceeded by all its uncommitted ancestors.'''
148
148
149 def mapchildren(parents):
149 def mapchildren(parents):
150 """Return a (children, roots) tuple where 'children' maps parent
150 """Return a (children, roots) tuple where 'children' maps parent
151 revision identifiers to children ones, and 'roots' is the list of
151 revision identifiers to children ones, and 'roots' is the list of
152 revisions without parents. 'parents' must be a mapping of revision
152 revisions without parents. 'parents' must be a mapping of revision
153 identifier to its parents ones.
153 identifier to its parents ones.
154 """
154 """
155 visit = parents.keys()
155 visit = parents.keys()
156 seen = set()
156 seen = set()
157 children = {}
157 children = {}
158 roots = []
158 roots = []
159
159
160 while visit:
160 while visit:
161 n = visit.pop(0)
161 n = visit.pop(0)
162 if n in seen:
162 if n in seen:
163 continue
163 continue
164 seen.add(n)
164 seen.add(n)
165 # Ensure that nodes without parents are present in the
165 # Ensure that nodes without parents are present in the
166 # 'children' mapping.
166 # 'children' mapping.
167 children.setdefault(n, [])
167 children.setdefault(n, [])
168 hasparent = False
168 hasparent = False
169 for p in parents[n]:
169 for p in parents[n]:
170 if not p in self.map:
170 if not p in self.map:
171 visit.append(p)
171 visit.append(p)
172 hasparent = True
172 hasparent = True
173 children.setdefault(p, []).append(n)
173 children.setdefault(p, []).append(n)
174 if not hasparent:
174 if not hasparent:
175 roots.append(n)
175 roots.append(n)
176
176
177 return children, roots
177 return children, roots
178
178
179 # Sort functions are supposed to take a list of revisions which
179 # Sort functions are supposed to take a list of revisions which
180 # can be converted immediately and pick one
180 # can be converted immediately and pick one
181
181
182 def makebranchsorter():
182 def makebranchsorter():
183 """If the previously converted revision has a child in the
183 """If the previously converted revision has a child in the
184 eligible revisions list, pick it. Return the list head
184 eligible revisions list, pick it. Return the list head
185 otherwise. Branch sort attempts to minimize branch
185 otherwise. Branch sort attempts to minimize branch
186 switching, which is harmful for Mercurial backend
186 switching, which is harmful for Mercurial backend
187 compression.
187 compression.
188 """
188 """
189 prev = [None]
189 prev = [None]
190 def picknext(nodes):
190 def picknext(nodes):
191 next = nodes[0]
191 next = nodes[0]
192 for n in nodes:
192 for n in nodes:
193 if prev[0] in parents[n]:
193 if prev[0] in parents[n]:
194 next = n
194 next = n
195 break
195 break
196 prev[0] = next
196 prev[0] = next
197 return next
197 return next
198 return picknext
198 return picknext
199
199
200 def makesourcesorter():
200 def makesourcesorter():
201 """Source specific sort."""
201 """Source specific sort."""
202 keyfn = lambda n: self.commitcache[n].sortkey
202 keyfn = lambda n: self.commitcache[n].sortkey
203 def picknext(nodes):
203 def picknext(nodes):
204 return sorted(nodes, key=keyfn)[0]
204 return sorted(nodes, key=keyfn)[0]
205 return picknext
205 return picknext
206
206
207 def makedatesorter():
207 def makedatesorter():
208 """Sort revisions by date."""
208 """Sort revisions by date."""
209 dates = {}
209 dates = {}
210 def getdate(n):
210 def getdate(n):
211 if n not in dates:
211 if n not in dates:
212 dates[n] = util.parsedate(self.commitcache[n].date)
212 dates[n] = util.parsedate(self.commitcache[n].date)
213 return dates[n]
213 return dates[n]
214
214
215 def picknext(nodes):
215 def picknext(nodes):
216 return min([(getdate(n), n) for n in nodes])[1]
216 return min([(getdate(n), n) for n in nodes])[1]
217
217
218 return picknext
218 return picknext
219
219
220 if sortmode == 'branchsort':
220 if sortmode == 'branchsort':
221 picknext = makebranchsorter()
221 picknext = makebranchsorter()
222 elif sortmode == 'datesort':
222 elif sortmode == 'datesort':
223 picknext = makedatesorter()
223 picknext = makedatesorter()
224 elif sortmode == 'sourcesort':
224 elif sortmode == 'sourcesort':
225 picknext = makesourcesorter()
225 picknext = makesourcesorter()
226 else:
226 else:
227 raise util.Abort(_('unknown sort mode: %s') % sortmode)
227 raise util.Abort(_('unknown sort mode: %s') % sortmode)
228
228
229 children, actives = mapchildren(parents)
229 children, actives = mapchildren(parents)
230
230
231 s = []
231 s = []
232 pendings = {}
232 pendings = {}
233 while actives:
233 while actives:
234 n = picknext(actives)
234 n = picknext(actives)
235 actives.remove(n)
235 actives.remove(n)
236 s.append(n)
236 s.append(n)
237
237
238 # Update dependents list
238 # Update dependents list
239 for c in children.get(n, []):
239 for c in children.get(n, []):
240 if c not in pendings:
240 if c not in pendings:
241 pendings[c] = [p for p in parents[c] if p not in self.map]
241 pendings[c] = [p for p in parents[c] if p not in self.map]
242 try:
242 try:
243 pendings[c].remove(n)
243 pendings[c].remove(n)
244 except ValueError:
244 except ValueError:
245 raise util.Abort(_('cycle detected between %s and %s')
245 raise util.Abort(_('cycle detected between %s and %s')
246 % (recode(c), recode(n)))
246 % (recode(c), recode(n)))
247 if not pendings[c]:
247 if not pendings[c]:
248 # Parents are converted, node is eligible
248 # Parents are converted, node is eligible
249 actives.insert(0, c)
249 actives.insert(0, c)
250 pendings[c] = None
250 pendings[c] = None
251
251
252 if len(s) != len(parents):
252 if len(s) != len(parents):
253 raise util.Abort(_("not all revisions were sorted"))
253 raise util.Abort(_("not all revisions were sorted"))
254
254
255 return s
255 return s
256
256
257 def writeauthormap(self):
257 def writeauthormap(self):
258 authorfile = self.authorfile
258 authorfile = self.authorfile
259 if authorfile:
259 if authorfile:
260 self.ui.status(_('Writing author map file %s\n') % authorfile)
260 self.ui.status(_('Writing author map file %s\n') % authorfile)
261 ofile = open(authorfile, 'w+')
261 ofile = open(authorfile, 'w+')
262 for author in self.authors:
262 for author in self.authors:
263 ofile.write("%s=%s\n" % (author, self.authors[author]))
263 ofile.write("%s=%s\n" % (author, self.authors[author]))
264 ofile.close()
264 ofile.close()
265
265
266 def readauthormap(self, authorfile):
266 def readauthormap(self, authorfile):
267 afile = open(authorfile, 'r')
267 afile = open(authorfile, 'r')
268 for line in afile:
268 for line in afile:
269
269
270 line = line.strip()
270 line = line.strip()
271 if not line or line.startswith('#'):
271 if not line or line.startswith('#'):
272 continue
272 continue
273
273
274 try:
274 try:
275 srcauthor, dstauthor = line.split('=', 1)
275 srcauthor, dstauthor = line.split('=', 1)
276 except ValueError:
276 except ValueError:
277 msg = _('Ignoring bad line in author map file %s: %s\n')
277 msg = _('Ignoring bad line in author map file %s: %s\n')
278 self.ui.warn(msg % (authorfile, line.rstrip()))
278 self.ui.warn(msg % (authorfile, line.rstrip()))
279 continue
279 continue
280
280
281 srcauthor = srcauthor.strip()
281 srcauthor = srcauthor.strip()
282 dstauthor = dstauthor.strip()
282 dstauthor = dstauthor.strip()
283 if self.authors.get(srcauthor) in (None, dstauthor):
283 if self.authors.get(srcauthor) in (None, dstauthor):
284 msg = _('mapping author %s to %s\n')
284 msg = _('mapping author %s to %s\n')
285 self.ui.debug(msg % (srcauthor, dstauthor))
285 self.ui.debug(msg % (srcauthor, dstauthor))
286 self.authors[srcauthor] = dstauthor
286 self.authors[srcauthor] = dstauthor
287 continue
287 continue
288
288
289 m = _('overriding mapping for author %s, was %s, will be %s\n')
289 m = _('overriding mapping for author %s, was %s, will be %s\n')
290 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
290 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
291
291
292 afile.close()
292 afile.close()
293
293
294 def cachecommit(self, rev):
294 def cachecommit(self, rev):
295 commit = self.source.getcommit(rev)
295 commit = self.source.getcommit(rev)
296 commit.author = self.authors.get(commit.author, commit.author)
296 commit.author = self.authors.get(commit.author, commit.author)
297 commit.branch = self.branchmap.get(commit.branch, commit.branch)
297 commit.branch = self.branchmap.get(commit.branch, commit.branch)
298 self.commitcache[rev] = commit
298 self.commitcache[rev] = commit
299 return commit
299 return commit
300
300
301 def copy(self, rev):
301 def copy(self, rev):
302 commit = self.commitcache[rev]
302 commit = self.commitcache[rev]
303
303
304 changes = self.source.getchanges(rev)
304 changes = self.source.getchanges(rev)
305 if isinstance(changes, basestring):
305 if isinstance(changes, basestring):
306 if changes == SKIPREV:
306 if changes == SKIPREV:
307 dest = SKIPREV
307 dest = SKIPREV
308 else:
308 else:
309 dest = self.map[changes]
309 dest = self.map[changes]
310 self.map[rev] = dest
310 self.map[rev] = dest
311 return
311 return
312 files, copies = changes
312 files, copies = changes
313 pbranches = []
313 pbranches = []
314 if commit.parents:
314 if commit.parents:
315 for prev in commit.parents:
315 for prev in commit.parents:
316 if prev not in self.commitcache:
316 if prev not in self.commitcache:
317 self.cachecommit(prev)
317 self.cachecommit(prev)
318 pbranches.append((self.map[prev],
318 pbranches.append((self.map[prev],
319 self.commitcache[prev].branch))
319 self.commitcache[prev].branch))
320 self.dest.setbranch(commit.branch, pbranches)
320 self.dest.setbranch(commit.branch, pbranches)
321 try:
321 try:
322 parents = self.splicemap[rev].replace(',', ' ').split()
322 parents = self.splicemap[rev]
323 self.ui.status(_('spliced in %s as parents of %s\n') %
323 self.ui.status(_('spliced in %s as parents of %s\n') %
324 (parents, rev))
324 (parents, rev))
325 parents = [self.map.get(p, p) for p in parents]
325 parents = [self.map.get(p, p) for p in parents]
326 except KeyError:
326 except KeyError:
327 parents = [b[0] for b in pbranches]
327 parents = [b[0] for b in pbranches]
328 source = progresssource(self.ui, self.source, len(files))
328 source = progresssource(self.ui, self.source, len(files))
329 newnode = self.dest.putcommit(files, copies, parents, commit,
329 newnode = self.dest.putcommit(files, copies, parents, commit,
330 source, self.map)
330 source, self.map)
331 source.close()
331 source.close()
332 self.source.converted(rev, newnode)
332 self.source.converted(rev, newnode)
333 self.map[rev] = newnode
333 self.map[rev] = newnode
334
334
335 def convert(self, sortmode):
335 def convert(self, sortmode):
336 try:
336 try:
337 self.source.before()
337 self.source.before()
338 self.dest.before()
338 self.dest.before()
339 self.source.setrevmap(self.map)
339 self.source.setrevmap(self.map)
340 self.ui.status(_("scanning source...\n"))
340 self.ui.status(_("scanning source...\n"))
341 heads = self.source.getheads()
341 heads = self.source.getheads()
342 parents = self.walktree(heads)
342 parents = self.walktree(heads)
343 self.ui.status(_("sorting...\n"))
343 self.ui.status(_("sorting...\n"))
344 t = self.toposort(parents, sortmode)
344 t = self.toposort(parents, sortmode)
345 num = len(t)
345 num = len(t)
346 c = None
346 c = None
347
347
348 self.ui.status(_("converting...\n"))
348 self.ui.status(_("converting...\n"))
349 for i, c in enumerate(t):
349 for i, c in enumerate(t):
350 num -= 1
350 num -= 1
351 desc = self.commitcache[c].desc
351 desc = self.commitcache[c].desc
352 if "\n" in desc:
352 if "\n" in desc:
353 desc = desc.splitlines()[0]
353 desc = desc.splitlines()[0]
354 # convert log message to local encoding without using
354 # convert log message to local encoding without using
355 # tolocal() because the encoding.encoding convert()
355 # tolocal() because the encoding.encoding convert()
356 # uses is 'utf-8'
356 # uses is 'utf-8'
357 self.ui.status("%d %s\n" % (num, recode(desc)))
357 self.ui.status("%d %s\n" % (num, recode(desc)))
358 self.ui.note(_("source: %s\n") % recode(c))
358 self.ui.note(_("source: %s\n") % recode(c))
359 self.ui.progress(_('converting'), i, unit=_('revisions'),
359 self.ui.progress(_('converting'), i, unit=_('revisions'),
360 total=len(t))
360 total=len(t))
361 self.copy(c)
361 self.copy(c)
362 self.ui.progress(_('converting'), None)
362 self.ui.progress(_('converting'), None)
363
363
364 tags = self.source.gettags()
364 tags = self.source.gettags()
365 ctags = {}
365 ctags = {}
366 for k in tags:
366 for k in tags:
367 v = tags[k]
367 v = tags[k]
368 if self.map.get(v, SKIPREV) != SKIPREV:
368 if self.map.get(v, SKIPREV) != SKIPREV:
369 ctags[k] = self.map[v]
369 ctags[k] = self.map[v]
370
370
371 if c and ctags:
371 if c and ctags:
372 nrev, tagsparent = self.dest.puttags(ctags)
372 nrev, tagsparent = self.dest.puttags(ctags)
373 if nrev and tagsparent:
373 if nrev and tagsparent:
374 # write another hash correspondence to override the previous
374 # write another hash correspondence to override the previous
375 # one so we don't end up with extra tag heads
375 # one so we don't end up with extra tag heads
376 tagsparents = [e for e in self.map.iteritems()
376 tagsparents = [e for e in self.map.iteritems()
377 if e[1] == tagsparent]
377 if e[1] == tagsparent]
378 if tagsparents:
378 if tagsparents:
379 self.map[tagsparents[0][0]] = nrev
379 self.map[tagsparents[0][0]] = nrev
380
380
381 bookmarks = self.source.getbookmarks()
381 bookmarks = self.source.getbookmarks()
382 cbookmarks = {}
382 cbookmarks = {}
383 for k in bookmarks:
383 for k in bookmarks:
384 v = bookmarks[k]
384 v = bookmarks[k]
385 if self.map.get(v, SKIPREV) != SKIPREV:
385 if self.map.get(v, SKIPREV) != SKIPREV:
386 cbookmarks[k] = self.map[v]
386 cbookmarks[k] = self.map[v]
387
387
388 if c and cbookmarks:
388 if c and cbookmarks:
389 self.dest.putbookmarks(cbookmarks)
389 self.dest.putbookmarks(cbookmarks)
390
390
391 self.writeauthormap()
391 self.writeauthormap()
392 finally:
392 finally:
393 self.cleanup()
393 self.cleanup()
394
394
395 def cleanup(self):
395 def cleanup(self):
396 try:
396 try:
397 self.dest.after()
397 self.dest.after()
398 finally:
398 finally:
399 self.source.after()
399 self.source.after()
400 self.map.close()
400 self.map.close()
401
401
402 def convert(ui, src, dest=None, revmapfile=None, **opts):
402 def convert(ui, src, dest=None, revmapfile=None, **opts):
403 global orig_encoding
403 global orig_encoding
404 orig_encoding = encoding.encoding
404 orig_encoding = encoding.encoding
405 encoding.encoding = 'UTF-8'
405 encoding.encoding = 'UTF-8'
406
406
407 # support --authors as an alias for --authormap
407 # support --authors as an alias for --authormap
408 if not opts.get('authormap'):
408 if not opts.get('authormap'):
409 opts['authormap'] = opts.get('authors')
409 opts['authormap'] = opts.get('authors')
410
410
411 if not dest:
411 if not dest:
412 dest = hg.defaultdest(src) + "-hg"
412 dest = hg.defaultdest(src) + "-hg"
413 ui.status(_("assuming destination %s\n") % dest)
413 ui.status(_("assuming destination %s\n") % dest)
414
414
415 destc = convertsink(ui, dest, opts.get('dest_type'))
415 destc = convertsink(ui, dest, opts.get('dest_type'))
416
416
417 try:
417 try:
418 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
418 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
419 opts.get('rev'))
419 opts.get('rev'))
420 except Exception:
420 except Exception:
421 for path in destc.created:
421 for path in destc.created:
422 shutil.rmtree(path, True)
422 shutil.rmtree(path, True)
423 raise
423 raise
424
424
425 sortmodes = ('branchsort', 'datesort', 'sourcesort')
425 sortmodes = ('branchsort', 'datesort', 'sourcesort')
426 sortmode = [m for m in sortmodes if opts.get(m)]
426 sortmode = [m for m in sortmodes if opts.get(m)]
427 if len(sortmode) > 1:
427 if len(sortmode) > 1:
428 raise util.Abort(_('more than one sort mode specified'))
428 raise util.Abort(_('more than one sort mode specified'))
429 sortmode = sortmode and sortmode[0] or defaultsort
429 sortmode = sortmode and sortmode[0] or defaultsort
430 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
430 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
431 raise util.Abort(_('--sourcesort is not supported by this data source'))
431 raise util.Abort(_('--sourcesort is not supported by this data source'))
432
432
433 fmap = opts.get('filemap')
433 fmap = opts.get('filemap')
434 if fmap:
434 if fmap:
435 srcc = filemap.filemap_source(ui, srcc, fmap)
435 srcc = filemap.filemap_source(ui, srcc, fmap)
436 destc.setfilemapmode(True)
436 destc.setfilemapmode(True)
437
437
438 if not revmapfile:
438 if not revmapfile:
439 try:
439 try:
440 revmapfile = destc.revmapfile()
440 revmapfile = destc.revmapfile()
441 except:
441 except:
442 revmapfile = os.path.join(destc, "map")
442 revmapfile = os.path.join(destc, "map")
443
443
444 c = converter(ui, srcc, destc, revmapfile, opts)
444 c = converter(ui, srcc, destc, revmapfile, opts)
445 c.convert(sortmode)
445 c.convert(sortmode)
446
446
@@ -1,79 +1,79 b''
1
1
2 $ echo "[extensions]" >> $HGRCPATH
2 $ echo "[extensions]" >> $HGRCPATH
3 $ echo "convert=" >> $HGRCPATH
3 $ echo "convert=" >> $HGRCPATH
4 $ echo 'graphlog =' >> $HGRCPATH
4 $ echo 'graphlog =' >> $HGRCPATH
5 $ glog()
5 $ glog()
6 > {
6 > {
7 > hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
7 > hg glog --template '{rev} "{desc|firstline}" files: {files}\n' "$@"
8 > }
8 > }
9 $ hg init repo1
9 $ hg init repo1
10 $ cd repo1
10 $ cd repo1
11 $ echo a > a
11 $ echo a > a
12 $ hg ci -Am adda
12 $ hg ci -Am adda
13 adding a
13 adding a
14 $ echo b > b
14 $ echo b > b
15 $ echo a >> a
15 $ echo a >> a
16 $ hg ci -Am addb
16 $ hg ci -Am addb
17 adding b
17 adding b
18 $ PARENTID1=`hg id --debug -i`
18 $ PARENTID1=`hg id --debug -i`
19 $ echo c > c
19 $ echo c > c
20 $ hg ci -Am addc
20 $ hg ci -Am addc
21 adding c
21 adding c
22 $ PARENTID2=`hg id --debug -i`
22 $ PARENTID2=`hg id --debug -i`
23 $ cd ..
23 $ cd ..
24 $ hg init repo2
24 $ hg init repo2
25 $ cd repo2
25 $ cd repo2
26 $ echo b > a
26 $ echo b > a
27 $ echo d > d
27 $ echo d > d
28 $ hg ci -Am addaandd
28 $ hg ci -Am addaandd
29 adding a
29 adding a
30 adding d
30 adding d
31 $ CHILDID1=`hg id --debug -i`
31 $ CHILDID1=`hg id --debug -i`
32 $ echo d >> d
32 $ echo d >> d
33 $ hg ci -Am changed
33 $ hg ci -Am changed
34 $ CHILDID2=`hg id --debug -i`
34 $ CHILDID2=`hg id --debug -i`
35 $ echo e > e
35 $ echo e > e
36 $ hg ci -Am adde
36 $ hg ci -Am adde
37 adding e
37 adding e
38 $ cd ..
38 $ cd ..
39
39
40 test invalid splicemap
40 test invalid splicemap
41
41
42 $ cat > splicemap <<EOF
42 $ cat > splicemap <<EOF
43 > $CHILDID2
43 > $CHILDID2
44 > EOF
44 > EOF
45 $ hg convert --splicemap splicemap repo2 repo1
45 $ hg convert --splicemap splicemap repo2 repo1
46 abort: syntax error in splicemap(1): key/value pair expected
46 abort: syntax error in splicemap(1): child parent1[,parent2] expected
47 [255]
47 [255]
48
48
49 splice repo2 on repo1
49 splice repo2 on repo1
50
50
51 $ cat > splicemap <<EOF
51 $ cat > splicemap <<EOF
52 > $CHILDID1 $PARENTID1
52 > $CHILDID1 $PARENTID1
53 > $CHILDID2 $PARENTID2,$CHILDID1
53 > $CHILDID2 $PARENTID2,$CHILDID1
54 > EOF
54 > EOF
55 $ hg clone repo1 target1
55 $ hg clone repo1 target1
56 updating to branch default
56 updating to branch default
57 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
57 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
58 $ hg convert --splicemap splicemap repo2 target1
58 $ hg convert --splicemap splicemap repo2 target1
59 scanning source...
59 scanning source...
60 sorting...
60 sorting...
61 converting...
61 converting...
62 2 addaandd
62 2 addaandd
63 spliced in ['6d4c2037ddc2cb2627ac3a244ecce35283268f8e'] as parents of 527cdedf31fbd5ea708aa14eeecf53d4676f38db
63 spliced in ['6d4c2037ddc2cb2627ac3a244ecce35283268f8e'] as parents of 527cdedf31fbd5ea708aa14eeecf53d4676f38db
64 1 changed
64 1 changed
65 spliced in ['e55c719b85b60e5102fac26110ba626e7cb6b7dc', '527cdedf31fbd5ea708aa14eeecf53d4676f38db'] as parents of e4ea00df91897da3079a10fab658c1eddba6617b
65 spliced in ['e55c719b85b60e5102fac26110ba626e7cb6b7dc', '527cdedf31fbd5ea708aa14eeecf53d4676f38db'] as parents of e4ea00df91897da3079a10fab658c1eddba6617b
66 0 adde
66 0 adde
67 $ glog -R target1
67 $ glog -R target1
68 o 5 "adde" files: e
68 o 5 "adde" files: e
69 |
69 |
70 o 4 "changed" files: d
70 o 4 "changed" files: d
71 |\
71 |\
72 | o 3 "addaandd" files: a d
72 | o 3 "addaandd" files: a d
73 | |
73 | |
74 @ | 2 "addc" files: c
74 @ | 2 "addc" files: c
75 |/
75 |/
76 o 1 "addb" files: a b
76 o 1 "addb" files: a b
77 |
77 |
78 o 0 "adda" files: a
78 o 0 "adda" files: a
79
79
General Comments 0
You need to be logged in to leave comments. Login now