splicemap: improve error handling when source is hg (issue2084)...
Ben Goswami
r19120:58e782f0 default
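For orientation: a file passed to 'hg convert --splicemap' holds one entry per line in the form 'child parent1[,parent2]'. Previously a missing splicemap file was silently ignored (only non-ENOENT IOErrors were re-raised) and malformed revision identifiers were not checked at parse time; the hunks below validate every entry up front and abort with a readable message. The entry below is illustrative only, using placeholder 40-character hex identifiers rather than real changesets:

1111111111111111111111111111111111111111 2222222222222222222222222222222222222222,3333333333333333333333333333333333333333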
@@ -1,432 +1,448 @@ common.py
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
-import base64, errno, subprocess, os, datetime
+import base64, errno, subprocess, os, datetime, re
9 import cPickle as pickle
9 import cPickle as pickle
10 from mercurial import util
10 from mercurial import util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 def encodeargs(args):
15 def encodeargs(args):
16 def encodearg(s):
16 def encodearg(s):
17 lines = base64.encodestring(s)
17 lines = base64.encodestring(s)
18 lines = [l.splitlines()[0] for l in lines]
18 lines = [l.splitlines()[0] for l in lines]
19 return ''.join(lines)
19 return ''.join(lines)
20
20
21 s = pickle.dumps(args)
21 s = pickle.dumps(args)
22 return encodearg(s)
22 return encodearg(s)
23
23
24 def decodeargs(s):
24 def decodeargs(s):
25 s = base64.decodestring(s)
25 s = base64.decodestring(s)
26 return pickle.loads(s)
26 return pickle.loads(s)
27
27
28 class MissingTool(Exception):
28 class MissingTool(Exception):
29 pass
29 pass
30
30
31 def checktool(exe, name=None, abort=True):
31 def checktool(exe, name=None, abort=True):
32 name = name or exe
32 name = name or exe
33 if not util.findexe(exe):
33 if not util.findexe(exe):
34 exc = abort and util.Abort or MissingTool
34 exc = abort and util.Abort or MissingTool
35 raise exc(_('cannot find required "%s" tool') % name)
35 raise exc(_('cannot find required "%s" tool') % name)
36
36
37 class NoRepo(Exception):
37 class NoRepo(Exception):
38 pass
38 pass
39
39
40 SKIPREV = 'SKIP'
40 SKIPREV = 'SKIP'
41
41
42 class commit(object):
42 class commit(object):
43 def __init__(self, author, date, desc, parents, branch=None, rev=None,
43 def __init__(self, author, date, desc, parents, branch=None, rev=None,
44 extra={}, sortkey=None):
44 extra={}, sortkey=None):
45 self.author = author or 'unknown'
45 self.author = author or 'unknown'
46 self.date = date or '0 0'
46 self.date = date or '0 0'
47 self.desc = desc
47 self.desc = desc
48 self.parents = parents
48 self.parents = parents
49 self.branch = branch
49 self.branch = branch
50 self.rev = rev
50 self.rev = rev
51 self.extra = extra
51 self.extra = extra
52 self.sortkey = sortkey
52 self.sortkey = sortkey
53
53
54 class converter_source(object):
54 class converter_source(object):
55 """Conversion source interface"""
55 """Conversion source interface"""
56
56
57 def __init__(self, ui, path=None, rev=None):
57 def __init__(self, ui, path=None, rev=None):
58 """Initialize conversion source (or raise NoRepo("message")
58 """Initialize conversion source (or raise NoRepo("message")
59 exception if path is not a valid repository)"""
59 exception if path is not a valid repository)"""
60 self.ui = ui
60 self.ui = ui
61 self.path = path
61 self.path = path
62 self.rev = rev
62 self.rev = rev
63
63
64 self.encoding = 'utf-8'
64 self.encoding = 'utf-8'
65
65
+def checkhexformat(self, revstr):
+""" fails if revstr is not a 40 byte hex. mercurial and git both uses
+such format for their revision numbering
+"""
+matchobj = re.match(r'[0-9a-fA-F]{40,40}$', revstr)
+if matchobj is None:
+raise util.Abort(_('splicemap entry %s is not a valid revision'
+' identifier') % revstr)
+
66 def before(self):
75 def before(self):
67 pass
76 pass
68
77
69 def after(self):
78 def after(self):
70 pass
79 pass
71
80
72 def setrevmap(self, revmap):
81 def setrevmap(self, revmap):
73 """set the map of already-converted revisions"""
82 """set the map of already-converted revisions"""
74 pass
83 pass
75
84
76 def getheads(self):
85 def getheads(self):
77 """Return a list of this repository's heads"""
86 """Return a list of this repository's heads"""
78 raise NotImplementedError
87 raise NotImplementedError
79
88
80 def getfile(self, name, rev):
89 def getfile(self, name, rev):
81 """Return a pair (data, mode) where data is the file content
90 """Return a pair (data, mode) where data is the file content
82 as a string and mode one of '', 'x' or 'l'. rev is the
91 as a string and mode one of '', 'x' or 'l'. rev is the
83 identifier returned by a previous call to getchanges(). Raise
92 identifier returned by a previous call to getchanges(). Raise
84 IOError to indicate that name was deleted in rev.
93 IOError to indicate that name was deleted in rev.
85 """
94 """
86 raise NotImplementedError
95 raise NotImplementedError
87
96
88 def getchanges(self, version):
97 def getchanges(self, version):
89 """Returns a tuple of (files, copies).
98 """Returns a tuple of (files, copies).
90
99
91 files is a sorted list of (filename, id) tuples for all files
100 files is a sorted list of (filename, id) tuples for all files
92 changed between version and its first parent returned by
101 changed between version and its first parent returned by
93 getcommit(). id is the source revision id of the file.
102 getcommit(). id is the source revision id of the file.
94
103
95 copies is a dictionary of dest: source
104 copies is a dictionary of dest: source
96 """
105 """
97 raise NotImplementedError
106 raise NotImplementedError
98
107
99 def getcommit(self, version):
108 def getcommit(self, version):
100 """Return the commit object for version"""
109 """Return the commit object for version"""
101 raise NotImplementedError
110 raise NotImplementedError
102
111
103 def gettags(self):
112 def gettags(self):
104 """Return the tags as a dictionary of name: revision
113 """Return the tags as a dictionary of name: revision
105
114
106 Tag names must be UTF-8 strings.
115 Tag names must be UTF-8 strings.
107 """
116 """
108 raise NotImplementedError
117 raise NotImplementedError
109
118
110 def recode(self, s, encoding=None):
119 def recode(self, s, encoding=None):
111 if not encoding:
120 if not encoding:
112 encoding = self.encoding or 'utf-8'
121 encoding = self.encoding or 'utf-8'
113
122
114 if isinstance(s, unicode):
123 if isinstance(s, unicode):
115 return s.encode("utf-8")
124 return s.encode("utf-8")
116 try:
125 try:
117 return s.decode(encoding).encode("utf-8")
126 return s.decode(encoding).encode("utf-8")
118 except UnicodeError:
127 except UnicodeError:
119 try:
128 try:
120 return s.decode("latin-1").encode("utf-8")
129 return s.decode("latin-1").encode("utf-8")
121 except UnicodeError:
130 except UnicodeError:
122 return s.decode(encoding, "replace").encode("utf-8")
131 return s.decode(encoding, "replace").encode("utf-8")
123
132
124 def getchangedfiles(self, rev, i):
133 def getchangedfiles(self, rev, i):
125 """Return the files changed by rev compared to parent[i].
134 """Return the files changed by rev compared to parent[i].
126
135
127 i is an index selecting one of the parents of rev. The return
136 i is an index selecting one of the parents of rev. The return
128 value should be the list of files that are different in rev and
137 value should be the list of files that are different in rev and
129 this parent.
138 this parent.
130
139
131 If rev has no parents, i is None.
140 If rev has no parents, i is None.
132
141
133 This function is only needed to support --filemap
142 This function is only needed to support --filemap
134 """
143 """
135 raise NotImplementedError
144 raise NotImplementedError
136
145
137 def converted(self, rev, sinkrev):
146 def converted(self, rev, sinkrev):
138 '''Notify the source that a revision has been converted.'''
147 '''Notify the source that a revision has been converted.'''
139 pass
148 pass
140
149
141 def hasnativeorder(self):
150 def hasnativeorder(self):
142 """Return true if this source has a meaningful, native revision
151 """Return true if this source has a meaningful, native revision
143 order. For instance, Mercurial revisions are store sequentially
152 order. For instance, Mercurial revisions are store sequentially
144 while there is no such global ordering with Darcs.
153 while there is no such global ordering with Darcs.
145 """
154 """
146 return False
155 return False
147
156
148 def hasnativeclose(self):
157 def hasnativeclose(self):
149 """Return true if this source has ability to close branch.
158 """Return true if this source has ability to close branch.
150 """
159 """
151 return False
160 return False
152
161
153 def lookuprev(self, rev):
162 def lookuprev(self, rev):
154 """If rev is a meaningful revision reference in source, return
163 """If rev is a meaningful revision reference in source, return
155 the referenced identifier in the same format used by getcommit().
164 the referenced identifier in the same format used by getcommit().
156 return None otherwise.
165 return None otherwise.
157 """
166 """
158 return None
167 return None
159
168
160 def getbookmarks(self):
169 def getbookmarks(self):
161 """Return the bookmarks as a dictionary of name: revision
170 """Return the bookmarks as a dictionary of name: revision
162
171
163 Bookmark names are to be UTF-8 strings.
172 Bookmark names are to be UTF-8 strings.
164 """
173 """
165 return {}
174 return {}
166
175
+def checkrevformat(self, revstr):
+"""revstr is a string that describes a revision in the given
+source control system. Return true if revstr has correct
+format.
+"""
+return True
+
167 class converter_sink(object):
183 class converter_sink(object):
168 """Conversion sink (target) interface"""
184 """Conversion sink (target) interface"""
169
185
170 def __init__(self, ui, path):
186 def __init__(self, ui, path):
171 """Initialize conversion sink (or raise NoRepo("message")
187 """Initialize conversion sink (or raise NoRepo("message")
172 exception if path is not a valid repository)
188 exception if path is not a valid repository)
173
189
174 created is a list of paths to remove if a fatal error occurs
190 created is a list of paths to remove if a fatal error occurs
175 later"""
191 later"""
176 self.ui = ui
192 self.ui = ui
177 self.path = path
193 self.path = path
178 self.created = []
194 self.created = []
179
195
180 def getheads(self):
196 def getheads(self):
181 """Return a list of this repository's heads"""
197 """Return a list of this repository's heads"""
182 raise NotImplementedError
198 raise NotImplementedError
183
199
184 def revmapfile(self):
200 def revmapfile(self):
185 """Path to a file that will contain lines
201 """Path to a file that will contain lines
186 source_rev_id sink_rev_id
202 source_rev_id sink_rev_id
187 mapping equivalent revision identifiers for each system."""
203 mapping equivalent revision identifiers for each system."""
188 raise NotImplementedError
204 raise NotImplementedError
189
205
190 def authorfile(self):
206 def authorfile(self):
191 """Path to a file that will contain lines
207 """Path to a file that will contain lines
192 srcauthor=dstauthor
208 srcauthor=dstauthor
193 mapping equivalent authors identifiers for each system."""
209 mapping equivalent authors identifiers for each system."""
194 return None
210 return None
195
211
196 def putcommit(self, files, copies, parents, commit, source, revmap):
212 def putcommit(self, files, copies, parents, commit, source, revmap):
197 """Create a revision with all changed files listed in 'files'
213 """Create a revision with all changed files listed in 'files'
198 and having listed parents. 'commit' is a commit object
214 and having listed parents. 'commit' is a commit object
199 containing at a minimum the author, date, and message for this
215 containing at a minimum the author, date, and message for this
200 changeset. 'files' is a list of (path, version) tuples,
216 changeset. 'files' is a list of (path, version) tuples,
201 'copies' is a dictionary mapping destinations to sources,
217 'copies' is a dictionary mapping destinations to sources,
202 'source' is the source repository, and 'revmap' is a mapfile
218 'source' is the source repository, and 'revmap' is a mapfile
203 of source revisions to converted revisions. Only getfile() and
219 of source revisions to converted revisions. Only getfile() and
204 lookuprev() should be called on 'source'.
220 lookuprev() should be called on 'source'.
205
221
206 Note that the sink repository is not told to update itself to
222 Note that the sink repository is not told to update itself to
207 a particular revision (or even what that revision would be)
223 a particular revision (or even what that revision would be)
208 before it receives the file data.
224 before it receives the file data.
209 """
225 """
210 raise NotImplementedError
226 raise NotImplementedError
211
227
212 def puttags(self, tags):
228 def puttags(self, tags):
213 """Put tags into sink.
229 """Put tags into sink.
214
230
215 tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
231 tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
216 Return a pair (tag_revision, tag_parent_revision), or (None, None)
232 Return a pair (tag_revision, tag_parent_revision), or (None, None)
217 if nothing was changed.
233 if nothing was changed.
218 """
234 """
219 raise NotImplementedError
235 raise NotImplementedError
220
236
221 def setbranch(self, branch, pbranches):
237 def setbranch(self, branch, pbranches):
222 """Set the current branch name. Called before the first putcommit
238 """Set the current branch name. Called before the first putcommit
223 on the branch.
239 on the branch.
224 branch: branch name for subsequent commits
240 branch: branch name for subsequent commits
225 pbranches: (converted parent revision, parent branch) tuples"""
241 pbranches: (converted parent revision, parent branch) tuples"""
226 pass
242 pass
227
243
228 def setfilemapmode(self, active):
244 def setfilemapmode(self, active):
229 """Tell the destination that we're using a filemap
245 """Tell the destination that we're using a filemap
230
246
231 Some converter_sources (svn in particular) can claim that a file
247 Some converter_sources (svn in particular) can claim that a file
232 was changed in a revision, even if there was no change. This method
248 was changed in a revision, even if there was no change. This method
233 tells the destination that we're using a filemap and that it should
249 tells the destination that we're using a filemap and that it should
234 filter empty revisions.
250 filter empty revisions.
235 """
251 """
236 pass
252 pass
237
253
238 def before(self):
254 def before(self):
239 pass
255 pass
240
256
241 def after(self):
257 def after(self):
242 pass
258 pass
243
259
244 def putbookmarks(self, bookmarks):
260 def putbookmarks(self, bookmarks):
245 """Put bookmarks into sink.
261 """Put bookmarks into sink.
246
262
247 bookmarks: {bookmarkname: sink_rev_id, ...}
263 bookmarks: {bookmarkname: sink_rev_id, ...}
248 where bookmarkname is an UTF-8 string.
264 where bookmarkname is an UTF-8 string.
249 """
265 """
250 pass
266 pass
251
267
252 def hascommit(self, rev):
268 def hascommit(self, rev):
253 """Return True if the sink contains rev"""
269 """Return True if the sink contains rev"""
254 raise NotImplementedError
270 raise NotImplementedError
255
271
256 class commandline(object):
272 class commandline(object):
257 def __init__(self, ui, command):
273 def __init__(self, ui, command):
258 self.ui = ui
274 self.ui = ui
259 self.command = command
275 self.command = command
260
276
261 def prerun(self):
277 def prerun(self):
262 pass
278 pass
263
279
264 def postrun(self):
280 def postrun(self):
265 pass
281 pass
266
282
267 def _cmdline(self, cmd, *args, **kwargs):
283 def _cmdline(self, cmd, *args, **kwargs):
268 cmdline = [self.command, cmd] + list(args)
284 cmdline = [self.command, cmd] + list(args)
269 for k, v in kwargs.iteritems():
285 for k, v in kwargs.iteritems():
270 if len(k) == 1:
286 if len(k) == 1:
271 cmdline.append('-' + k)
287 cmdline.append('-' + k)
272 else:
288 else:
273 cmdline.append('--' + k.replace('_', '-'))
289 cmdline.append('--' + k.replace('_', '-'))
274 try:
290 try:
275 if len(k) == 1:
291 if len(k) == 1:
276 cmdline.append('' + v)
292 cmdline.append('' + v)
277 else:
293 else:
278 cmdline[-1] += '=' + v
294 cmdline[-1] += '=' + v
279 except TypeError:
295 except TypeError:
280 pass
296 pass
281 cmdline = [util.shellquote(arg) for arg in cmdline]
297 cmdline = [util.shellquote(arg) for arg in cmdline]
282 if not self.ui.debugflag:
298 if not self.ui.debugflag:
283 cmdline += ['2>', os.devnull]
299 cmdline += ['2>', os.devnull]
284 cmdline = ' '.join(cmdline)
300 cmdline = ' '.join(cmdline)
285 return cmdline
301 return cmdline
286
302
287 def _run(self, cmd, *args, **kwargs):
303 def _run(self, cmd, *args, **kwargs):
288 def popen(cmdline):
304 def popen(cmdline):
289 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
305 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
290 close_fds=util.closefds,
306 close_fds=util.closefds,
291 stdout=subprocess.PIPE)
307 stdout=subprocess.PIPE)
292 return p
308 return p
293 return self._dorun(popen, cmd, *args, **kwargs)
309 return self._dorun(popen, cmd, *args, **kwargs)
294
310
295 def _run2(self, cmd, *args, **kwargs):
311 def _run2(self, cmd, *args, **kwargs):
296 return self._dorun(util.popen2, cmd, *args, **kwargs)
312 return self._dorun(util.popen2, cmd, *args, **kwargs)
297
313
298 def _dorun(self, openfunc, cmd, *args, **kwargs):
314 def _dorun(self, openfunc, cmd, *args, **kwargs):
299 cmdline = self._cmdline(cmd, *args, **kwargs)
315 cmdline = self._cmdline(cmd, *args, **kwargs)
300 self.ui.debug('running: %s\n' % (cmdline,))
316 self.ui.debug('running: %s\n' % (cmdline,))
301 self.prerun()
317 self.prerun()
302 try:
318 try:
303 return openfunc(cmdline)
319 return openfunc(cmdline)
304 finally:
320 finally:
305 self.postrun()
321 self.postrun()
306
322
307 def run(self, cmd, *args, **kwargs):
323 def run(self, cmd, *args, **kwargs):
308 p = self._run(cmd, *args, **kwargs)
324 p = self._run(cmd, *args, **kwargs)
309 output = p.communicate()[0]
325 output = p.communicate()[0]
310 self.ui.debug(output)
326 self.ui.debug(output)
311 return output, p.returncode
327 return output, p.returncode
312
328
313 def runlines(self, cmd, *args, **kwargs):
329 def runlines(self, cmd, *args, **kwargs):
314 p = self._run(cmd, *args, **kwargs)
330 p = self._run(cmd, *args, **kwargs)
315 output = p.stdout.readlines()
331 output = p.stdout.readlines()
316 p.wait()
332 p.wait()
317 self.ui.debug(''.join(output))
333 self.ui.debug(''.join(output))
318 return output, p.returncode
334 return output, p.returncode
319
335
320 def checkexit(self, status, output=''):
336 def checkexit(self, status, output=''):
321 if status:
337 if status:
322 if output:
338 if output:
323 self.ui.warn(_('%s error:\n') % self.command)
339 self.ui.warn(_('%s error:\n') % self.command)
324 self.ui.warn(output)
340 self.ui.warn(output)
325 msg = util.explainexit(status)[0]
341 msg = util.explainexit(status)[0]
326 raise util.Abort('%s %s' % (self.command, msg))
342 raise util.Abort('%s %s' % (self.command, msg))
327
343
328 def run0(self, cmd, *args, **kwargs):
344 def run0(self, cmd, *args, **kwargs):
329 output, status = self.run(cmd, *args, **kwargs)
345 output, status = self.run(cmd, *args, **kwargs)
330 self.checkexit(status, output)
346 self.checkexit(status, output)
331 return output
347 return output
332
348
333 def runlines0(self, cmd, *args, **kwargs):
349 def runlines0(self, cmd, *args, **kwargs):
334 output, status = self.runlines(cmd, *args, **kwargs)
350 output, status = self.runlines(cmd, *args, **kwargs)
335 self.checkexit(status, ''.join(output))
351 self.checkexit(status, ''.join(output))
336 return output
352 return output
337
353
338 @propertycache
354 @propertycache
339 def argmax(self):
355 def argmax(self):
340 # POSIX requires at least 4096 bytes for ARG_MAX
356 # POSIX requires at least 4096 bytes for ARG_MAX
341 argmax = 4096
357 argmax = 4096
342 try:
358 try:
343 argmax = os.sysconf("SC_ARG_MAX")
359 argmax = os.sysconf("SC_ARG_MAX")
344 except (AttributeError, ValueError):
360 except (AttributeError, ValueError):
345 pass
361 pass
346
362
347 # Windows shells impose their own limits on command line length,
363 # Windows shells impose their own limits on command line length,
348 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
364 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
349 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
365 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
350 # details about cmd.exe limitations.
366 # details about cmd.exe limitations.
351
367
352 # Since ARG_MAX is for command line _and_ environment, lower our limit
368 # Since ARG_MAX is for command line _and_ environment, lower our limit
353 # (and make happy Windows shells while doing this).
369 # (and make happy Windows shells while doing this).
354 return argmax // 2 - 1
370 return argmax // 2 - 1
355
371
356 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
372 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
357 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
373 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
358 limit = self.argmax - cmdlen
374 limit = self.argmax - cmdlen
359 bytes = 0
375 bytes = 0
360 fl = []
376 fl = []
361 for fn in arglist:
377 for fn in arglist:
362 b = len(fn) + 3
378 b = len(fn) + 3
363 if bytes + b < limit or len(fl) == 0:
379 if bytes + b < limit or len(fl) == 0:
364 fl.append(fn)
380 fl.append(fn)
365 bytes += b
381 bytes += b
366 else:
382 else:
367 yield fl
383 yield fl
368 fl = [fn]
384 fl = [fn]
369 bytes = b
385 bytes = b
370 if fl:
386 if fl:
371 yield fl
387 yield fl
372
388
373 def xargs(self, arglist, cmd, *args, **kwargs):
389 def xargs(self, arglist, cmd, *args, **kwargs):
374 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
390 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
375 self.run0(cmd, *(list(args) + l), **kwargs)
391 self.run0(cmd, *(list(args) + l), **kwargs)
376
392
377 class mapfile(dict):
393 class mapfile(dict):
378 def __init__(self, ui, path):
394 def __init__(self, ui, path):
379 super(mapfile, self).__init__()
395 super(mapfile, self).__init__()
380 self.ui = ui
396 self.ui = ui
381 self.path = path
397 self.path = path
382 self.fp = None
398 self.fp = None
383 self.order = []
399 self.order = []
384 self._read()
400 self._read()
385
401
386 def _read(self):
402 def _read(self):
387 if not self.path:
403 if not self.path:
388 return
404 return
389 try:
405 try:
390 fp = open(self.path, 'r')
406 fp = open(self.path, 'r')
391 except IOError, err:
407 except IOError, err:
392 if err.errno != errno.ENOENT:
408 if err.errno != errno.ENOENT:
393 raise
409 raise
394 return
410 return
395 for i, line in enumerate(fp):
411 for i, line in enumerate(fp):
396 line = line.splitlines()[0].rstrip()
412 line = line.splitlines()[0].rstrip()
397 if not line:
413 if not line:
398 # Ignore blank lines
414 # Ignore blank lines
399 continue
415 continue
400 try:
416 try:
401 key, value = line.rsplit(' ', 1)
417 key, value = line.rsplit(' ', 1)
402 except ValueError:
418 except ValueError:
403 raise util.Abort(
419 raise util.Abort(
404 _('syntax error in %s(%d): key/value pair expected')
420 _('syntax error in %s(%d): key/value pair expected')
405 % (self.path, i + 1))
421 % (self.path, i + 1))
406 if key not in self:
422 if key not in self:
407 self.order.append(key)
423 self.order.append(key)
408 super(mapfile, self).__setitem__(key, value)
424 super(mapfile, self).__setitem__(key, value)
409 fp.close()
425 fp.close()
410
426
411 def __setitem__(self, key, value):
427 def __setitem__(self, key, value):
412 if self.fp is None:
428 if self.fp is None:
413 try:
429 try:
414 self.fp = open(self.path, 'a')
430 self.fp = open(self.path, 'a')
415 except IOError, err:
431 except IOError, err:
416 raise util.Abort(_('could not open map file %r: %s') %
432 raise util.Abort(_('could not open map file %r: %s') %
417 (self.path, err.strerror))
433 (self.path, err.strerror))
418 self.fp.write('%s %s\n' % (key, value))
434 self.fp.write('%s %s\n' % (key, value))
419 self.fp.flush()
435 self.fp.flush()
420 super(mapfile, self).__setitem__(key, value)
436 super(mapfile, self).__setitem__(key, value)
421
437
422 def close(self):
438 def close(self):
423 if self.fp:
439 if self.fp:
424 self.fp.close()
440 self.fp.close()
425 self.fp = None
441 self.fp = None
426
442
427 def makedatetimestamp(t):
443 def makedatetimestamp(t):
428 """Like util.makedate() but for time t instead of current time"""
444 """Like util.makedate() but for time t instead of current time"""
429 delta = (datetime.datetime.utcfromtimestamp(t) -
445 delta = (datetime.datetime.utcfromtimestamp(t) -
430 datetime.datetime.fromtimestamp(t))
446 datetime.datetime.fromtimestamp(t))
431 tz = delta.days * 86400 + delta.seconds
447 tz = delta.days * 86400 + delta.seconds
432 return t, tz
448 return t, tz
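As a standalone illustration of the hook this hunk adds to converter_source, the sketch below reproduces the 40-character hex check in plain Python 3, with ValueError standing in for util.Abort so it runs outside Mercurial; apart from the regular expression and the message text, the surrounding scaffolding is illustrative only.

import re

def checkhexformat(revstr):
    # Same check as the converter_source.checkhexformat added above:
    # Mercurial and git node ids are exactly 40 hexadecimal characters.
    if re.match(r'[0-9a-fA-F]{40,40}$', revstr) is None:
        raise ValueError('splicemap entry %s is not a valid revision '
                         'identifier' % revstr)

checkhexformat('1111111111111111111111111111111111111111')   # passes silently
try:
    checkhexformat('abc123')                                  # too short: rejected
except ValueError as err:
    print(err)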
@@ -1,512 +1,530 @@ convcmd.py
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 from cvs import convert_cvs
9 from cvs import convert_cvs
10 from darcs import darcs_source
10 from darcs import darcs_source
11 from git import convert_git
11 from git import convert_git
12 from hg import mercurial_source, mercurial_sink
12 from hg import mercurial_source, mercurial_sink
13 from subversion import svn_source, svn_sink
13 from subversion import svn_source, svn_sink
14 from monotone import monotone_source
14 from monotone import monotone_source
15 from gnuarch import gnuarch_source
15 from gnuarch import gnuarch_source
16 from bzr import bzr_source
16 from bzr import bzr_source
17 from p4 import p4_source
17 from p4 import p4_source
18 import filemap
18 import filemap
19
19
20 import os, shutil
20 import os, shutil
21 from mercurial import hg, util, encoding
21 from mercurial import hg, util, encoding
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24 orig_encoding = 'ascii'
24 orig_encoding = 'ascii'
25
25
26 def recode(s):
26 def recode(s):
27 if isinstance(s, unicode):
27 if isinstance(s, unicode):
28 return s.encode(orig_encoding, 'replace')
28 return s.encode(orig_encoding, 'replace')
29 else:
29 else:
30 return s.decode('utf-8').encode(orig_encoding, 'replace')
30 return s.decode('utf-8').encode(orig_encoding, 'replace')
31
31
32 source_converters = [
32 source_converters = [
33 ('cvs', convert_cvs, 'branchsort'),
33 ('cvs', convert_cvs, 'branchsort'),
34 ('git', convert_git, 'branchsort'),
34 ('git', convert_git, 'branchsort'),
35 ('svn', svn_source, 'branchsort'),
35 ('svn', svn_source, 'branchsort'),
36 ('hg', mercurial_source, 'sourcesort'),
36 ('hg', mercurial_source, 'sourcesort'),
37 ('darcs', darcs_source, 'branchsort'),
37 ('darcs', darcs_source, 'branchsort'),
38 ('mtn', monotone_source, 'branchsort'),
38 ('mtn', monotone_source, 'branchsort'),
39 ('gnuarch', gnuarch_source, 'branchsort'),
39 ('gnuarch', gnuarch_source, 'branchsort'),
40 ('bzr', bzr_source, 'branchsort'),
40 ('bzr', bzr_source, 'branchsort'),
41 ('p4', p4_source, 'branchsort'),
41 ('p4', p4_source, 'branchsort'),
42 ]
42 ]
43
43
44 sink_converters = [
44 sink_converters = [
45 ('hg', mercurial_sink),
45 ('hg', mercurial_sink),
46 ('svn', svn_sink),
46 ('svn', svn_sink),
47 ]
47 ]
48
48
49 def convertsource(ui, path, type, rev):
49 def convertsource(ui, path, type, rev):
50 exceptions = []
50 exceptions = []
51 if type and type not in [s[0] for s in source_converters]:
51 if type and type not in [s[0] for s in source_converters]:
52 raise util.Abort(_('%s: invalid source repository type') % type)
52 raise util.Abort(_('%s: invalid source repository type') % type)
53 for name, source, sortmode in source_converters:
53 for name, source, sortmode in source_converters:
54 try:
54 try:
55 if not type or name == type:
55 if not type or name == type:
56 return source(ui, path, rev), sortmode
56 return source(ui, path, rev), sortmode
57 except (NoRepo, MissingTool), inst:
57 except (NoRepo, MissingTool), inst:
58 exceptions.append(inst)
58 exceptions.append(inst)
59 if not ui.quiet:
59 if not ui.quiet:
60 for inst in exceptions:
60 for inst in exceptions:
61 ui.write("%s\n" % inst)
61 ui.write("%s\n" % inst)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
63
63
64 def convertsink(ui, path, type):
64 def convertsink(ui, path, type):
65 if type and type not in [s[0] for s in sink_converters]:
65 if type and type not in [s[0] for s in sink_converters]:
66 raise util.Abort(_('%s: invalid destination repository type') % type)
66 raise util.Abort(_('%s: invalid destination repository type') % type)
67 for name, sink in sink_converters:
67 for name, sink in sink_converters:
68 try:
68 try:
69 if not type or name == type:
69 if not type or name == type:
70 return sink(ui, path)
70 return sink(ui, path)
71 except NoRepo, inst:
71 except NoRepo, inst:
72 ui.note(_("convert: %s\n") % inst)
72 ui.note(_("convert: %s\n") % inst)
73 except MissingTool, inst:
73 except MissingTool, inst:
74 raise util.Abort('%s\n' % inst)
74 raise util.Abort('%s\n' % inst)
75 raise util.Abort(_('%s: unknown repository type') % path)
75 raise util.Abort(_('%s: unknown repository type') % path)
76
76
77 class progresssource(object):
77 class progresssource(object):
78 def __init__(self, ui, source, filecount):
78 def __init__(self, ui, source, filecount):
79 self.ui = ui
79 self.ui = ui
80 self.source = source
80 self.source = source
81 self.filecount = filecount
81 self.filecount = filecount
82 self.retrieved = 0
82 self.retrieved = 0
83
83
84 def getfile(self, file, rev):
84 def getfile(self, file, rev):
85 self.retrieved += 1
85 self.retrieved += 1
86 self.ui.progress(_('getting files'), self.retrieved,
86 self.ui.progress(_('getting files'), self.retrieved,
87 item=file, total=self.filecount)
87 item=file, total=self.filecount)
88 return self.source.getfile(file, rev)
88 return self.source.getfile(file, rev)
89
89
90 def lookuprev(self, rev):
90 def lookuprev(self, rev):
91 return self.source.lookuprev(rev)
91 return self.source.lookuprev(rev)
92
92
93 def close(self):
93 def close(self):
94 self.ui.progress(_('getting files'), None)
94 self.ui.progress(_('getting files'), None)
95
95
96 class converter(object):
96 class converter(object):
97 def __init__(self, ui, source, dest, revmapfile, opts):
97 def __init__(self, ui, source, dest, revmapfile, opts):
98
98
99 self.source = source
99 self.source = source
100 self.dest = dest
100 self.dest = dest
101 self.ui = ui
101 self.ui = ui
102 self.opts = opts
102 self.opts = opts
103 self.commitcache = {}
103 self.commitcache = {}
104 self.authors = {}
104 self.authors = {}
105 self.authorfile = None
105 self.authorfile = None
106
106
107 # Record converted revisions persistently: maps source revision
107 # Record converted revisions persistently: maps source revision
108 # ID to target revision ID (both strings). (This is how
108 # ID to target revision ID (both strings). (This is how
109 # incremental conversions work.)
109 # incremental conversions work.)
110 self.map = mapfile(ui, revmapfile)
110 self.map = mapfile(ui, revmapfile)
111
111
112 # Read first the dst author map if any
112 # Read first the dst author map if any
113 authorfile = self.dest.authorfile()
113 authorfile = self.dest.authorfile()
114 if authorfile and os.path.exists(authorfile):
114 if authorfile and os.path.exists(authorfile):
115 self.readauthormap(authorfile)
115 self.readauthormap(authorfile)
116 # Extend/Override with new author map if necessary
116 # Extend/Override with new author map if necessary
117 if opts.get('authormap'):
117 if opts.get('authormap'):
118 self.readauthormap(opts.get('authormap'))
118 self.readauthormap(opts.get('authormap'))
119 self.authorfile = self.dest.authorfile()
119 self.authorfile = self.dest.authorfile()
120
120
121 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
121 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
122 self.branchmap = mapfile(ui, opts.get('branchmap'))
122 self.branchmap = mapfile(ui, opts.get('branchmap'))
123
123
+def parsesplicemap(self, path):
+""" check and validate the splicemap format and
+return a child/parents dictionary.
+Format checking has two parts.
+1. generic format which is same across all source types
+2. specific format checking which may be different for
+different source type. This logic is implemented in
+checkrevformat function in source files like
+hg.py, subversion.py etc.
+"""

-def parsesplicemap(self, path):
-"""Parse a splicemap, return a child/parents dictionary."""
127 if not path:
135 if not path:
128 return {}
136 return {}
129 m = {}
137 m = {}
130 try:
138 try:
131 fp = open(path, 'r')
139 fp = open(path, 'r')
132 for i, line in enumerate(fp):
140 for i, line in enumerate(fp):
133 line = line.splitlines()[0].rstrip()
141 line = line.splitlines()[0].rstrip()
134 if not line:
142 if not line:
135 # Ignore blank lines
143 # Ignore blank lines
136 continue
144 continue
137 try:
145 try:
138 child, parents = line.split(' ', 1)
146 child, parents = line.split(' ', 1)
+self.source.checkrevformat(child)
 parents = parents.replace(',', ' ').split()
+# check if number of parents are upto 2 max
+if (len(parents) > 2):
+raise util.Abort(_('syntax error in %s(%d): child '\
+'parent1[,parent2] expected') \
+% (path, i + 1))
+for parent in parents:
+self.source.checkrevformat(parent)
 except ValueError:
-raise util.Abort(_('syntax error in %s(%d): child parent1'
-'[,parent2] expected') % (path, i + 1))
+raise util.Abort(_('syntax error in %s(%d): child '\
+'parent1[,parent2] expected') \
+% (path, i + 1))
143 pp = []
160 pp = []
144 for p in parents:
161 for p in parents:
145 if p not in pp:
162 if p not in pp:
146 pp.append(p)
163 pp.append(p)
147 m[child] = pp
164 m[child] = pp
-except IOError, e:
-if e.errno != errno.ENOENT:
-raise
+# if file does not exist or error reading, exit
+except IOError:
+raise util.Abort(_('splicemap file not found or error reading %s:')
+% path)
151 return m
169 return m
152
170
153
171
154 def walktree(self, heads):
172 def walktree(self, heads):
155 '''Return a mapping that identifies the uncommitted parents of every
173 '''Return a mapping that identifies the uncommitted parents of every
156 uncommitted changeset.'''
174 uncommitted changeset.'''
157 visit = heads
175 visit = heads
158 known = set()
176 known = set()
159 parents = {}
177 parents = {}
160 while visit:
178 while visit:
161 n = visit.pop(0)
179 n = visit.pop(0)
162 if n in known or n in self.map:
180 if n in known or n in self.map:
163 continue
181 continue
164 known.add(n)
182 known.add(n)
165 self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
183 self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
166 commit = self.cachecommit(n)
184 commit = self.cachecommit(n)
167 parents[n] = []
185 parents[n] = []
168 for p in commit.parents:
186 for p in commit.parents:
169 parents[n].append(p)
187 parents[n].append(p)
170 visit.append(p)
188 visit.append(p)
171 self.ui.progress(_('scanning'), None)
189 self.ui.progress(_('scanning'), None)
172
190
173 return parents
191 return parents
174
192
175 def mergesplicemap(self, parents, splicemap):
193 def mergesplicemap(self, parents, splicemap):
176 """A splicemap redefines child/parent relationships. Check the
194 """A splicemap redefines child/parent relationships. Check the
177 map contains valid revision identifiers and merge the new
195 map contains valid revision identifiers and merge the new
178 links in the source graph.
196 links in the source graph.
179 """
197 """
180 for c in sorted(splicemap):
198 for c in sorted(splicemap):
181 if c not in parents:
199 if c not in parents:
182 if not self.dest.hascommit(self.map.get(c, c)):
200 if not self.dest.hascommit(self.map.get(c, c)):
183 # Could be in source but not converted during this run
201 # Could be in source but not converted during this run
184 self.ui.warn(_('splice map revision %s is not being '
202 self.ui.warn(_('splice map revision %s is not being '
185 'converted, ignoring\n') % c)
203 'converted, ignoring\n') % c)
186 continue
204 continue
187 pc = []
205 pc = []
188 for p in splicemap[c]:
206 for p in splicemap[c]:
189 # We do not have to wait for nodes already in dest.
207 # We do not have to wait for nodes already in dest.
190 if self.dest.hascommit(self.map.get(p, p)):
208 if self.dest.hascommit(self.map.get(p, p)):
191 continue
209 continue
192 # Parent is not in dest and not being converted, not good
210 # Parent is not in dest and not being converted, not good
193 if p not in parents:
211 if p not in parents:
194 raise util.Abort(_('unknown splice map parent: %s') % p)
212 raise util.Abort(_('unknown splice map parent: %s') % p)
195 pc.append(p)
213 pc.append(p)
196 parents[c] = pc
214 parents[c] = pc
197
215
198 def toposort(self, parents, sortmode):
216 def toposort(self, parents, sortmode):
199 '''Return an ordering such that every uncommitted changeset is
217 '''Return an ordering such that every uncommitted changeset is
200 preceded by all its uncommitted ancestors.'''
218 preceded by all its uncommitted ancestors.'''
201
219
202 def mapchildren(parents):
220 def mapchildren(parents):
203 """Return a (children, roots) tuple where 'children' maps parent
221 """Return a (children, roots) tuple where 'children' maps parent
204 revision identifiers to children ones, and 'roots' is the list of
222 revision identifiers to children ones, and 'roots' is the list of
205 revisions without parents. 'parents' must be a mapping of revision
223 revisions without parents. 'parents' must be a mapping of revision
206 identifier to its parents ones.
224 identifier to its parents ones.
207 """
225 """
208 visit = sorted(parents)
226 visit = sorted(parents)
209 seen = set()
227 seen = set()
210 children = {}
228 children = {}
211 roots = []
229 roots = []
212
230
213 while visit:
231 while visit:
214 n = visit.pop(0)
232 n = visit.pop(0)
215 if n in seen:
233 if n in seen:
216 continue
234 continue
217 seen.add(n)
235 seen.add(n)
218 # Ensure that nodes without parents are present in the
236 # Ensure that nodes without parents are present in the
219 # 'children' mapping.
237 # 'children' mapping.
220 children.setdefault(n, [])
238 children.setdefault(n, [])
221 hasparent = False
239 hasparent = False
222 for p in parents[n]:
240 for p in parents[n]:
223 if p not in self.map:
241 if p not in self.map:
224 visit.append(p)
242 visit.append(p)
225 hasparent = True
243 hasparent = True
226 children.setdefault(p, []).append(n)
244 children.setdefault(p, []).append(n)
227 if not hasparent:
245 if not hasparent:
228 roots.append(n)
246 roots.append(n)
229
247
230 return children, roots
248 return children, roots
231
249
232 # Sort functions are supposed to take a list of revisions which
250 # Sort functions are supposed to take a list of revisions which
233 # can be converted immediately and pick one
251 # can be converted immediately and pick one
234
252
235 def makebranchsorter():
253 def makebranchsorter():
236 """If the previously converted revision has a child in the
254 """If the previously converted revision has a child in the
237 eligible revisions list, pick it. Return the list head
255 eligible revisions list, pick it. Return the list head
238 otherwise. Branch sort attempts to minimize branch
256 otherwise. Branch sort attempts to minimize branch
239 switching, which is harmful for Mercurial backend
257 switching, which is harmful for Mercurial backend
240 compression.
258 compression.
241 """
259 """
242 prev = [None]
260 prev = [None]
243 def picknext(nodes):
261 def picknext(nodes):
244 next = nodes[0]
262 next = nodes[0]
245 for n in nodes:
263 for n in nodes:
246 if prev[0] in parents[n]:
264 if prev[0] in parents[n]:
247 next = n
265 next = n
248 break
266 break
249 prev[0] = next
267 prev[0] = next
250 return next
268 return next
251 return picknext
269 return picknext
252
270
253 def makesourcesorter():
271 def makesourcesorter():
254 """Source specific sort."""
272 """Source specific sort."""
255 keyfn = lambda n: self.commitcache[n].sortkey
273 keyfn = lambda n: self.commitcache[n].sortkey
256 def picknext(nodes):
274 def picknext(nodes):
257 return sorted(nodes, key=keyfn)[0]
275 return sorted(nodes, key=keyfn)[0]
258 return picknext
276 return picknext
259
277
260 def makeclosesorter():
278 def makeclosesorter():
261 """Close order sort."""
279 """Close order sort."""
262 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
280 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
263 self.commitcache[n].sortkey)
281 self.commitcache[n].sortkey)
264 def picknext(nodes):
282 def picknext(nodes):
265 return sorted(nodes, key=keyfn)[0]
283 return sorted(nodes, key=keyfn)[0]
266 return picknext
284 return picknext
267
285
268 def makedatesorter():
286 def makedatesorter():
269 """Sort revisions by date."""
287 """Sort revisions by date."""
270 dates = {}
288 dates = {}
271 def getdate(n):
289 def getdate(n):
272 if n not in dates:
290 if n not in dates:
273 dates[n] = util.parsedate(self.commitcache[n].date)
291 dates[n] = util.parsedate(self.commitcache[n].date)
274 return dates[n]
292 return dates[n]
275
293
276 def picknext(nodes):
294 def picknext(nodes):
277 return min([(getdate(n), n) for n in nodes])[1]
295 return min([(getdate(n), n) for n in nodes])[1]
278
296
279 return picknext
297 return picknext
280
298
281 if sortmode == 'branchsort':
299 if sortmode == 'branchsort':
282 picknext = makebranchsorter()
300 picknext = makebranchsorter()
283 elif sortmode == 'datesort':
301 elif sortmode == 'datesort':
284 picknext = makedatesorter()
302 picknext = makedatesorter()
285 elif sortmode == 'sourcesort':
303 elif sortmode == 'sourcesort':
286 picknext = makesourcesorter()
304 picknext = makesourcesorter()
287 elif sortmode == 'closesort':
305 elif sortmode == 'closesort':
288 picknext = makeclosesorter()
306 picknext = makeclosesorter()
289 else:
307 else:
290 raise util.Abort(_('unknown sort mode: %s') % sortmode)
308 raise util.Abort(_('unknown sort mode: %s') % sortmode)
291
309
292 children, actives = mapchildren(parents)
310 children, actives = mapchildren(parents)
293
311
294 s = []
312 s = []
295 pendings = {}
313 pendings = {}
296 while actives:
314 while actives:
297 n = picknext(actives)
315 n = picknext(actives)
298 actives.remove(n)
316 actives.remove(n)
299 s.append(n)
317 s.append(n)
300
318
301 # Update dependents list
319 # Update dependents list
302 for c in children.get(n, []):
320 for c in children.get(n, []):
303 if c not in pendings:
321 if c not in pendings:
304 pendings[c] = [p for p in parents[c] if p not in self.map]
322 pendings[c] = [p for p in parents[c] if p not in self.map]
305 try:
323 try:
306 pendings[c].remove(n)
324 pendings[c].remove(n)
307 except ValueError:
325 except ValueError:
308 raise util.Abort(_('cycle detected between %s and %s')
326 raise util.Abort(_('cycle detected between %s and %s')
309 % (recode(c), recode(n)))
327 % (recode(c), recode(n)))
310 if not pendings[c]:
328 if not pendings[c]:
311 # Parents are converted, node is eligible
329 # Parents are converted, node is eligible
312 actives.insert(0, c)
330 actives.insert(0, c)
313 pendings[c] = None
331 pendings[c] = None
314
332
315 if len(s) != len(parents):
333 if len(s) != len(parents):
316 raise util.Abort(_("not all revisions were sorted"))
334 raise util.Abort(_("not all revisions were sorted"))
317
335
318 return s
336 return s
319
337
320 def writeauthormap(self):
338 def writeauthormap(self):
321 authorfile = self.authorfile
339 authorfile = self.authorfile
322 if authorfile:
340 if authorfile:
323 self.ui.status(_('writing author map file %s\n') % authorfile)
341 self.ui.status(_('writing author map file %s\n') % authorfile)
324 ofile = open(authorfile, 'w+')
342 ofile = open(authorfile, 'w+')
325 for author in self.authors:
343 for author in self.authors:
326 ofile.write("%s=%s\n" % (author, self.authors[author]))
344 ofile.write("%s=%s\n" % (author, self.authors[author]))
327 ofile.close()
345 ofile.close()
328
346
329 def readauthormap(self, authorfile):
347 def readauthormap(self, authorfile):
330 afile = open(authorfile, 'r')
348 afile = open(authorfile, 'r')
331 for line in afile:
349 for line in afile:
332
350
333 line = line.strip()
351 line = line.strip()
334 if not line or line.startswith('#'):
352 if not line or line.startswith('#'):
335 continue
353 continue
336
354
337 try:
355 try:
338 srcauthor, dstauthor = line.split('=', 1)
356 srcauthor, dstauthor = line.split('=', 1)
339 except ValueError:
357 except ValueError:
340 msg = _('ignoring bad line in author map file %s: %s\n')
358 msg = _('ignoring bad line in author map file %s: %s\n')
341 self.ui.warn(msg % (authorfile, line.rstrip()))
359 self.ui.warn(msg % (authorfile, line.rstrip()))
342 continue
360 continue
343
361
344 srcauthor = srcauthor.strip()
362 srcauthor = srcauthor.strip()
345 dstauthor = dstauthor.strip()
363 dstauthor = dstauthor.strip()
346 if self.authors.get(srcauthor) in (None, dstauthor):
364 if self.authors.get(srcauthor) in (None, dstauthor):
347 msg = _('mapping author %s to %s\n')
365 msg = _('mapping author %s to %s\n')
348 self.ui.debug(msg % (srcauthor, dstauthor))
366 self.ui.debug(msg % (srcauthor, dstauthor))
349 self.authors[srcauthor] = dstauthor
367 self.authors[srcauthor] = dstauthor
350 continue
368 continue
351
369
352 m = _('overriding mapping for author %s, was %s, will be %s\n')
370 m = _('overriding mapping for author %s, was %s, will be %s\n')
353 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
371 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
354
372
355 afile.close()
373 afile.close()
356
374
357 def cachecommit(self, rev):
375 def cachecommit(self, rev):
358 commit = self.source.getcommit(rev)
376 commit = self.source.getcommit(rev)
359 commit.author = self.authors.get(commit.author, commit.author)
377 commit.author = self.authors.get(commit.author, commit.author)
360 commit.branch = self.branchmap.get(commit.branch, commit.branch)
378 commit.branch = self.branchmap.get(commit.branch, commit.branch)
361 self.commitcache[rev] = commit
379 self.commitcache[rev] = commit
362 return commit
380 return commit
363
381
364 def copy(self, rev):
382 def copy(self, rev):
365 commit = self.commitcache[rev]
383 commit = self.commitcache[rev]
366
384
367 changes = self.source.getchanges(rev)
385 changes = self.source.getchanges(rev)
368 if isinstance(changes, basestring):
386 if isinstance(changes, basestring):
369 if changes == SKIPREV:
387 if changes == SKIPREV:
370 dest = SKIPREV
388 dest = SKIPREV
371 else:
389 else:
372 dest = self.map[changes]
390 dest = self.map[changes]
373 self.map[rev] = dest
391 self.map[rev] = dest
374 return
392 return
375 files, copies = changes
393 files, copies = changes
376 pbranches = []
394 pbranches = []
377 if commit.parents:
395 if commit.parents:
378 for prev in commit.parents:
396 for prev in commit.parents:
379 if prev not in self.commitcache:
397 if prev not in self.commitcache:
380 self.cachecommit(prev)
398 self.cachecommit(prev)
381 pbranches.append((self.map[prev],
399 pbranches.append((self.map[prev],
382 self.commitcache[prev].branch))
400 self.commitcache[prev].branch))
383 self.dest.setbranch(commit.branch, pbranches)
401 self.dest.setbranch(commit.branch, pbranches)
384 try:
402 try:
385 parents = self.splicemap[rev]
403 parents = self.splicemap[rev]
386 self.ui.status(_('spliced in %s as parents of %s\n') %
404 self.ui.status(_('spliced in %s as parents of %s\n') %
387 (parents, rev))
405 (parents, rev))
388 parents = [self.map.get(p, p) for p in parents]
406 parents = [self.map.get(p, p) for p in parents]
389 except KeyError:
407 except KeyError:
390 parents = [b[0] for b in pbranches]
408 parents = [b[0] for b in pbranches]
391 source = progresssource(self.ui, self.source, len(files))
409 source = progresssource(self.ui, self.source, len(files))
392 newnode = self.dest.putcommit(files, copies, parents, commit,
410 newnode = self.dest.putcommit(files, copies, parents, commit,
393 source, self.map)
411 source, self.map)
394 source.close()
412 source.close()
395 self.source.converted(rev, newnode)
413 self.source.converted(rev, newnode)
396 self.map[rev] = newnode
414 self.map[rev] = newnode
397
415
398 def convert(self, sortmode):
416 def convert(self, sortmode):
399 try:
417 try:
400 self.source.before()
418 self.source.before()
401 self.dest.before()
419 self.dest.before()
402 self.source.setrevmap(self.map)
420 self.source.setrevmap(self.map)
403 self.ui.status(_("scanning source...\n"))
421 self.ui.status(_("scanning source...\n"))
404 heads = self.source.getheads()
422 heads = self.source.getheads()
405 parents = self.walktree(heads)
423 parents = self.walktree(heads)
406 self.mergesplicemap(parents, self.splicemap)
424 self.mergesplicemap(parents, self.splicemap)
407 self.ui.status(_("sorting...\n"))
425 self.ui.status(_("sorting...\n"))
408 t = self.toposort(parents, sortmode)
426 t = self.toposort(parents, sortmode)
409 num = len(t)
427 num = len(t)
410 c = None
428 c = None
411
429
412 self.ui.status(_("converting...\n"))
430 self.ui.status(_("converting...\n"))
413 for i, c in enumerate(t):
431 for i, c in enumerate(t):
414 num -= 1
432 num -= 1
415 desc = self.commitcache[c].desc
433 desc = self.commitcache[c].desc
416 if "\n" in desc:
434 if "\n" in desc:
417 desc = desc.splitlines()[0]
435 desc = desc.splitlines()[0]
418 # convert log message to local encoding without using
436 # convert log message to local encoding without using
419 # tolocal() because the encoding.encoding convert()
437 # tolocal() because the encoding.encoding convert()
420 # uses is 'utf-8'
438 # uses is 'utf-8'
421 self.ui.status("%d %s\n" % (num, recode(desc)))
439 self.ui.status("%d %s\n" % (num, recode(desc)))
422 self.ui.note(_("source: %s\n") % recode(c))
440 self.ui.note(_("source: %s\n") % recode(c))
423 self.ui.progress(_('converting'), i, unit=_('revisions'),
441 self.ui.progress(_('converting'), i, unit=_('revisions'),
424 total=len(t))
442 total=len(t))
425 self.copy(c)
443 self.copy(c)
426 self.ui.progress(_('converting'), None)
444 self.ui.progress(_('converting'), None)
427
445
428 tags = self.source.gettags()
446 tags = self.source.gettags()
429 ctags = {}
447 ctags = {}
430 for k in tags:
448 for k in tags:
431 v = tags[k]
449 v = tags[k]
432 if self.map.get(v, SKIPREV) != SKIPREV:
450 if self.map.get(v, SKIPREV) != SKIPREV:
433 ctags[k] = self.map[v]
451 ctags[k] = self.map[v]
434
452
435 if c and ctags:
453 if c and ctags:
436 nrev, tagsparent = self.dest.puttags(ctags)
454 nrev, tagsparent = self.dest.puttags(ctags)
437 if nrev and tagsparent:
455 if nrev and tagsparent:
438 # write another hash correspondence to override the previous
456 # write another hash correspondence to override the previous
439 # one so we don't end up with extra tag heads
457 # one so we don't end up with extra tag heads
440 tagsparents = [e for e in self.map.iteritems()
458 tagsparents = [e for e in self.map.iteritems()
441 if e[1] == tagsparent]
459 if e[1] == tagsparent]
442 if tagsparents:
460 if tagsparents:
443 self.map[tagsparents[0][0]] = nrev
461 self.map[tagsparents[0][0]] = nrev
444
462
445 bookmarks = self.source.getbookmarks()
463 bookmarks = self.source.getbookmarks()
446 cbookmarks = {}
464 cbookmarks = {}
447 for k in bookmarks:
465 for k in bookmarks:
448 v = bookmarks[k]
466 v = bookmarks[k]
449 if self.map.get(v, SKIPREV) != SKIPREV:
467 if self.map.get(v, SKIPREV) != SKIPREV:
450 cbookmarks[k] = self.map[v]
468 cbookmarks[k] = self.map[v]
451
469
452 if c and cbookmarks:
470 if c and cbookmarks:
453 self.dest.putbookmarks(cbookmarks)
471 self.dest.putbookmarks(cbookmarks)
454
472
455 self.writeauthormap()
473 self.writeauthormap()
456 finally:
474 finally:
457 self.cleanup()
475 self.cleanup()
458
476
459 def cleanup(self):
477 def cleanup(self):
460 try:
478 try:
461 self.dest.after()
479 self.dest.after()
462 finally:
480 finally:
463 self.source.after()
481 self.source.after()
464 self.map.close()
482 self.map.close()
465
483
466 def convert(ui, src, dest=None, revmapfile=None, **opts):
484 def convert(ui, src, dest=None, revmapfile=None, **opts):
467 global orig_encoding
485 global orig_encoding
468 orig_encoding = encoding.encoding
486 orig_encoding = encoding.encoding
469 encoding.encoding = 'UTF-8'
487 encoding.encoding = 'UTF-8'
470
488
471 # support --authors as an alias for --authormap
489 # support --authors as an alias for --authormap
472 if not opts.get('authormap'):
490 if not opts.get('authormap'):
473 opts['authormap'] = opts.get('authors')
491 opts['authormap'] = opts.get('authors')
474
492
475 if not dest:
493 if not dest:
476 dest = hg.defaultdest(src) + "-hg"
494 dest = hg.defaultdest(src) + "-hg"
477 ui.status(_("assuming destination %s\n") % dest)
495 ui.status(_("assuming destination %s\n") % dest)
478
496
479 destc = convertsink(ui, dest, opts.get('dest_type'))
497 destc = convertsink(ui, dest, opts.get('dest_type'))
480
498
481 try:
499 try:
482 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
500 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
483 opts.get('rev'))
501 opts.get('rev'))
484 except Exception:
502 except Exception:
485 for path in destc.created:
503 for path in destc.created:
486 shutil.rmtree(path, True)
504 shutil.rmtree(path, True)
487 raise
505 raise
488
506
489 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
507 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
490 sortmode = [m for m in sortmodes if opts.get(m)]
508 sortmode = [m for m in sortmodes if opts.get(m)]
491 if len(sortmode) > 1:
509 if len(sortmode) > 1:
492 raise util.Abort(_('more than one sort mode specified'))
510 raise util.Abort(_('more than one sort mode specified'))
493 sortmode = sortmode and sortmode[0] or defaultsort
511 sortmode = sortmode and sortmode[0] or defaultsort
494 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
512 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
495 raise util.Abort(_('--sourcesort is not supported by this data source'))
513 raise util.Abort(_('--sourcesort is not supported by this data source'))
496 if sortmode == 'closesort' and not srcc.hasnativeclose():
514 if sortmode == 'closesort' and not srcc.hasnativeclose():
497 raise util.Abort(_('--closesort is not supported by this data source'))
515 raise util.Abort(_('--closesort is not supported by this data source'))
498
516
499 fmap = opts.get('filemap')
517 fmap = opts.get('filemap')
500 if fmap:
518 if fmap:
501 srcc = filemap.filemap_source(ui, srcc, fmap)
519 srcc = filemap.filemap_source(ui, srcc, fmap)
502 destc.setfilemapmode(True)
520 destc.setfilemapmode(True)
503
521
504 if not revmapfile:
522 if not revmapfile:
505 try:
523 try:
506 revmapfile = destc.revmapfile()
524 revmapfile = destc.revmapfile()
507 except Exception:
525 except Exception:
508 revmapfile = os.path.join(destc, "map")
526 revmapfile = os.path.join(destc, "map")
509
527
510 c = converter(ui, srcc, destc, revmapfile, opts)
528 c = converter(ui, srcc, destc, revmapfile, opts)
511 c.convert(sortmode)
529 c.convert(sortmode)
512
530
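To make the new convcmd.py behaviour concrete, here is a compact self-contained sketch of the validation flow the hunk above introduces: split each entry into child and parents, check every identifier, allow at most two parents, and turn an unreadable file into a clear error. It is recast in Python 3 with ValueError/OSError standing in for util.Abort and for the source object's checkrevformat() hook, so treat it as an approximation of the flow rather than the extension's actual code.

import re

def _checkrev(revstr, path, lineno):
    # Stands in for source.checkrevformat(); hg/git ids are 40 hex characters.
    if re.match(r'[0-9a-fA-F]{40}$', revstr) is None:
        raise ValueError('splicemap entry %s is not a valid revision '
                         'identifier (%s line %d)' % (revstr, path, lineno))

def parsesplicemap(path):
    """Return a {child: [parent1, parent2?]} dictionary, aborting early on
    malformed entries instead of failing later during conversion."""
    m = {}
    try:
        fp = open(path)
    except OSError:
        raise ValueError('splicemap file not found or error reading %s' % path)
    with fp:
        for i, line in enumerate(fp):
            line = line.rstrip()
            if not line:
                continue  # blank lines are ignored
            try:
                child, parents = line.split(' ', 1)
            except ValueError:
                raise ValueError('syntax error in %s(%d): child '
                                 'parent1[,parent2] expected' % (path, i + 1))
            _checkrev(child, path, i + 1)
            parents = parents.replace(',', ' ').split()
            if len(parents) > 2:
                raise ValueError('syntax error in %s(%d): child '
                                 'parent1[,parent2] expected' % (path, i + 1))
            for p in parents:
                _checkrev(p, path, i + 1)
            pp = []
            for p in parents:
                if p not in pp:   # drop duplicate parents, keep order
                    pp.append(p)
            m[child] = pp
    return m

Running it over the illustrative splicemap entry shown near the top of the page yields a single child mapped to its two spliced parents.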
@@ -1,399 +1,403 @@ hg.py
# hg.py - hg backend for convert extension
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# Notes for hg->hg conversion:
#
# * Old versions of Mercurial didn't trim the whitespace from the ends
#   of commit messages, but new versions do. Changesets created by
#   those older versions, then converted, may thus have different
#   hashes for changesets that are otherwise identical.
#
# * Using "--config convert.hg.saverev=true" will make the source
#   identifier to be stored in the converted revision. This will cause
#   the converted revision to have a different identity than the
#   source.
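#
#   Illustrative usage (not part of the original file; the repository
#   names below are hypothetical):
#
#       hg convert --config convert.hg.saverev=true project project-hg
#
#   With saverev enabled, getcommit() below passes the source changeset
#   id through, and putcommit() records it in the 'convert_revision'
#   extra field of the destination changeset.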

import os, time, cStringIO
from mercurial.i18n import _
from mercurial.node import bin, hex, nullid
from mercurial import hg, util, context, bookmarks, error

from common import NoRepo, commit, converter_source, converter_sink

class mercurial_sink(converter_sink):
    def __init__(self, ui, path):
        converter_sink.__init__(self, ui, path)
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
            except error.RepoError, err:
                ui.traceback()
                raise NoRepo(err.args[0])
        else:
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
                self.created.append(path)
            except error.RepoError:
                ui.traceback()
                raise NoRepo(_("could not create hg repository %s as sink")
                             % path)
        self.lock = None
        self.wlock = None
        self.filemapmode = False

    def before(self):
        self.ui.debug('run hg sink pre-conversion action\n')
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()

    def after(self):
        self.ui.debug('run hg sink post-conversion action\n')
        if self.lock:
            self.lock.release()
        if self.wlock:
            self.wlock.release()

    def revmapfile(self):
        return self.repo.join("shamap")

    def authorfile(self):
        return self.repo.join("authormap")

    def getheads(self):
        h = self.repo.changelog.heads()
        return [hex(x) for x in h]

    def setbranch(self, branch, pbranches):
        if not self.clonebranches:
            return

        setbranch = (branch != self.lastbranch)
        self.lastbranch = branch
        if not branch:
            branch = 'default'
        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
        pbranch = pbranches and pbranches[0][1] or 'default'

        branchpath = os.path.join(self.path, branch)
        if setbranch:
            self.after()
            try:
                self.repo = hg.repository(self.ui, branchpath)
            except Exception:
                self.repo = hg.repository(self.ui, branchpath, create=True)
            self.before()

        # pbranches may bring revisions from other branches (merge parents)
        # Make sure we have them, or pull them.
        missings = {}
        for b in pbranches:
            try:
                self.repo.lookup(b[0])
            except Exception:
                missings.setdefault(b[1], []).append(b[0])

        if missings:
            self.after()
            for pbranch, heads in sorted(missings.iteritems()):
                pbranchpath = os.path.join(self.path, pbranch)
                prepo = hg.peer(self.ui, {}, pbranchpath)
                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
                self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
            self.before()

    def _rewritetags(self, source, revmap, data):
        fp = cStringIO.StringIO()
        for line in data.splitlines():
            s = line.split(' ', 1)
            if len(s) != 2:
                continue
            revid = revmap.get(source.lookuprev(s[0]))
            if not revid:
                continue
            fp.write('%s %s\n' % (revid, s[1]))
        return fp.getvalue()

    def putcommit(self, files, copies, parents, commit, source, revmap):

        files = dict(files)
        def getfilectx(repo, memctx, f):
            v = files[f]
            data, mode = source.getfile(f, v)
            if f == '.hgtags':
                data = self._rewritetags(source, revmap, data)
            return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
                                      copies.get(f))

        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            m1node = self.repo.changelog.read(bin(parents[0]))[0]
            parent = parents[0]

        if len(parents) < 2:
            parents.append(nullid)
        if len(parents) < 2:
            parents.append(nullid)
        p2 = parents.pop(0)

        text = commit.desc
        extra = commit.extra.copy()
        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev:
            extra['convert_revision'] = commit.rev

        while parents:
            p1 = p2
            p2 = parents.pop(0)
            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
                                 getfilectx, commit.author, commit.date, extra)
            self.repo.commitctx(ctx)
            text = "(octopus merge fixup)\n"
            p2 = hex(self.repo.changelog.tip())

        if self.filemapmode and nparents == 1:
            man = self.repo.manifest
            mnode = self.repo.changelog.read(bin(p2))[0]
            closed = 'close' in commit.extra
            if not closed and not man.cmp(m1node, man.revision(mnode)):
                self.ui.status(_("filtering out empty revision\n"))
                self.repo.rollback(force=True)
                return parent
        return p2

    def puttags(self, tags):
        try:
            parentctx = self.repo[self.tagsbranch]
            tagparent = parentctx.node()
        except error.RepoError:
            parentctx = None
            tagparent = nullid

        try:
            oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
        except Exception:
            oldlines = []

        newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
        if newlines == oldlines:
            return None, None
        data = "".join(newlines)
        def getfilectx(repo, memctx, f):
            return context.memfilectx(f, data, False, False, None)

        self.ui.status(_("updating tags\n"))
        date = "%s 0" % int(time.mktime(time.gmtime()))
        extra = {'branch': self.tagsbranch}
        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
                             [".hgtags"], getfilectx, "convert-repo", date,
                             extra)
        self.repo.commitctx(ctx)
        return hex(self.repo.changelog.tip()), hex(tagparent)

    def setfilemapmode(self, active):
        self.filemapmode = active

    def putbookmarks(self, updatedbookmark):
        if not len(updatedbookmark):
            return

        self.ui.status(_("updating bookmarks\n"))
        destmarks = self.repo._bookmarks
        for bookmark in updatedbookmark:
            destmarks[bookmark] = bin(updatedbookmark[bookmark])
        destmarks.write()

    def hascommit(self, rev):
        if rev not in self.repo and self.clonebranches:
            raise util.Abort(_('revision %s not found in destination '
                               'repository (lookups with clonebranches=true '
                               'are not implemented)') % rev)
        return rev in self.repo

class mercurial_source(converter_source):
    def __init__(self, ui, path, rev=None):
        converter_source.__init__(self, ui, path, rev)
        self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
        self.ignored = set()
        self.saverev = ui.configbool('convert', 'hg.saverev', False)
        try:
            self.repo = hg.repository(self.ui, path)
            # try to provoke an exception if this isn't really a hg
            # repo, but some other bogus compatible-looking url
            if not self.repo.local():
                raise error.RepoError
        except error.RepoError:
            ui.traceback()
            raise NoRepo(_("%s is not a local Mercurial repository") % path)
        self.lastrev = None
        self.lastctx = None
        self._changescache = None
        self.convertfp = None
        # Restrict converted revisions to startrev descendants
        startnode = ui.config('convert', 'hg.startrev')
        if startnode is not None:
            try:
                startnode = self.repo.lookup(startnode)
            except error.RepoError:
                raise util.Abort(_('%s is not a valid start revision')
                                 % startnode)
            startrev = self.repo.changelog.rev(startnode)
            children = {startnode: 1}
            for rev in self.repo.changelog.descendants([startrev]):
                children[self.repo.changelog.node(rev)] = 1
            self.keep = children.__contains__
        else:
            self.keep = util.always

    def changectx(self, rev):
        if self.lastrev != rev:
            self.lastctx = self.repo[rev]
            self.lastrev = rev
        return self.lastctx

    def parents(self, ctx):
        return [p for p in ctx.parents() if p and self.keep(p.node())]

    def getheads(self):
        if self.rev:
            heads = [self.repo[self.rev].node()]
        else:
            heads = self.repo.heads()
        return [hex(h) for h in heads if self.keep(h)]

    def getfile(self, name, rev):
        try:
            fctx = self.changectx(rev)[name]
            return fctx.data(), fctx.flags()
        except error.LookupError, err:
            raise IOError(err)

    def getchanges(self, rev):
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents:
            files = sorted(ctx.manifest())
            # getcopies() is not needed for roots, but it is a simple way to
            # detect missing revlogs and abort on errors or populate
            # self.ignored
            self.getcopies(ctx, parents, files)
            return [(f, rev) for f in files if f not in self.ignored], {}
        if self._changescache and self._changescache[0] == rev:
            m, a, r = self._changescache[1]
        else:
            m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
        # getcopies() detects missing revlogs early, run it before
        # filtering the changes.
        copies = self.getcopies(ctx, parents, m + a)
        changes = [(name, rev) for name in m + a + r
                   if name not in self.ignored]
        return sorted(changes), copies

    def getcopies(self, ctx, parents, files):
        copies = {}
        for name in files:
            if name in self.ignored:
                continue
            try:
                copysource, copynode = ctx.filectx(name).renamed()
                if copysource in self.ignored or not self.keep(copynode):
                    continue
                # Ignore copy sources not in parent revisions
                found = False
                for p in parents:
                    if copysource in p:
                        found = True
                        break
                if not found:
                    continue
                copies[name] = copysource
            except TypeError:
                pass
            except error.LookupError, e:
                if not self.ignoreerrors:
                    raise
                self.ignored.add(name)
                self.ui.warn(_('ignoring: %s\n') % e)
        return copies

    def getcommit(self, rev):
        ctx = self.changectx(rev)
        parents = [p.hex() for p in self.parents(ctx)]
        if self.saverev:
            crev = rev
        else:
            crev = None
        return commit(author=ctx.user(),
                      date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
                      desc=ctx.description(), rev=crev, parents=parents,
                      branch=ctx.branch(), extra=ctx.extra(),
                      sortkey=ctx.rev())

    def gettags(self):
        tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
        return dict([(name, hex(node)) for name, node in tags
                     if self.keep(node)])

    def getchangedfiles(self, rev, i):
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents and i is None:
            i = 0
            changes = [], ctx.manifest().keys(), []
        else:
            i = i or 0
            changes = self.repo.status(parents[i].node(), ctx.node())[:3]
            changes = [[f for f in l if f not in self.ignored] for l in changes]

        if i == 0:
            self._changescache = (rev, changes)

        return changes[0] + changes[1] + changes[2]

    def converted(self, rev, destrev):
        if self.convertfp is None:
            self.convertfp = open(self.repo.join('shamap'), 'a')
        self.convertfp.write('%s %s\n' % (destrev, rev))
        self.convertfp.flush()

    def before(self):
        self.ui.debug('run hg source pre-conversion action\n')

    def after(self):
        self.ui.debug('run hg source post-conversion action\n')

    def hasnativeorder(self):
        return True

    def hasnativeclose(self):
        return True

    def lookuprev(self, rev):
        try:
            return hex(self.repo.lookup(rev))
        except error.RepoError:
            return None

    def getbookmarks(self):
        return bookmarks.listbookmarks(self.repo)

    def checkrevformat(self, revstr):
        """ Mercurial, revision string is a 40 byte hex """
        self.checkhexformat(revstr)
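
The new checkrevformat() hook above delegates to checkhexformat(), which this changeset adds to common.py and which is not shown in this hunk. A minimal sketch of such a validator, consistent with the docstring above and with the abort message exercised by the splicemap test below; the actual code in common.py may differ:

import re
from mercurial import util
from mercurial.i18n import _

def checkhexformat(revstr):
    # Accept only a full 40-character hexadecimal changeset id; anything
    # else aborts with the message the splicemap test checks for.
    if not re.match(r'[0-9a-fA-F]{40}$', revstr):
        raise util.Abort(_('splicemap entry %s is not a valid revision'
                           ' identifier') % revstr)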
@@ -1,222 +1,242 @@

$ echo "[extensions]" >> $HGRCPATH
$ echo "convert=" >> $HGRCPATH
$ echo 'graphlog =' >> $HGRCPATH
$ glog()
> {
> hg glog --template '{rev}:{node|short} "{desc|firstline}"\
> files: {files}\n' "$@"
> }
$ hg init repo1
$ cd repo1
$ echo a > a
$ hg ci -Am adda
adding a
$ echo b > b
$ echo a >> a
$ hg ci -Am addb
adding b
$ PARENTID1=`hg id --debug -i`
$ echo c > c
$ hg ci -Am addc
adding c
$ PARENTID2=`hg id --debug -i`
$ cd ..
$ glog -R repo1
@ 2:e55c719b85b6 "addc" files: c
|
o 1:6d4c2037ddc2 "addb" files: a b
|
o 0:07f494440405 "adda" files: a

$ hg init repo2
$ cd repo2
$ echo b > a
$ echo d > d
$ hg ci -Am addaandd
adding a
adding d
$ INVALIDID1=afd12345af
$ INVALIDID2=28173x36ddd1e67bf7098d541130558ef5534a86
$ CHILDID1=`hg id --debug -i`
$ echo d >> d
$ hg ci -Am changed
$ CHILDID2=`hg id --debug -i`
$ echo e > e
$ hg ci -Am adde
adding e
$ cd ..
$ glog -R repo2
@ 2:a39b65753b0a "adde" files: e
|
o 1:e4ea00df9189 "changed" files: d
|
o 0:527cdedf31fb "addaandd" files: a d

test invalid splicemap1

$ cat > splicemap <<EOF
> $CHILDID2
> EOF
$ hg convert --splicemap splicemap repo2 repo1
abort: syntax error in splicemap(1): child parent1[,parent2] expected
[255]

test invalid splicemap2

$ cat > splicemap <<EOF
> $CHILDID2 $PARENTID1, $PARENTID2, $PARENTID2
> EOF
$ hg convert --splicemap splicemap repo2 repo1
abort: syntax error in splicemap(1): child parent1[,parent2] expected
[255]

test invalid splicemap3

$ cat > splicemap <<EOF
> $INVALIDID1 $INVALIDID2
> EOF
$ hg convert --splicemap splicemap repo2 repo1
abort: splicemap entry afd12345af is not a valid revision identifier
[255]

splice repo2 on repo1

$ cat > splicemap <<EOF
> $CHILDID1 $PARENTID1
> $CHILDID2 $PARENTID2,$CHILDID1
>
> EOF
$ cat splicemap
527cdedf31fbd5ea708aa14eeecf53d4676f38db 6d4c2037ddc2cb2627ac3a244ecce35283268f8e
e4ea00df91897da3079a10fab658c1eddba6617b e55c719b85b60e5102fac26110ba626e7cb6b7dc,527cdedf31fbd5ea708aa14eeecf53d4676f38db

$ hg clone repo1 target1
updating to branch default
3 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ hg convert --splicemap splicemap repo2 target1
scanning source...
sorting...
converting...
2 addaandd
spliced in ['6d4c2037ddc2cb2627ac3a244ecce35283268f8e'] as parents of 527cdedf31fbd5ea708aa14eeecf53d4676f38db
1 changed
spliced in ['e55c719b85b60e5102fac26110ba626e7cb6b7dc', '527cdedf31fbd5ea708aa14eeecf53d4676f38db'] as parents of e4ea00df91897da3079a10fab658c1eddba6617b
0 adde
$ glog -R target1
o 5:16bc847b02aa "adde" files: e
|
o 4:e30e4fee3418 "changed" files: d
|\
| o 3:e673348c3a3c "addaandd" files: a d
| |
@ | 2:e55c719b85b6 "addc" files: c
|/
o 1:6d4c2037ddc2 "addb" files: a b
|
o 0:07f494440405 "adda" files: a

Test splicemap and conversion order

$ hg init ordered
$ cd ordered
$ echo a > a
$ hg ci -Am adda
adding a
$ hg branch branch
marked working directory as branch branch
(branches are permanent and global, did you want a bookmark?)
$ echo a >> a
$ hg ci -Am changea
$ echo a >> a
$ hg ci -Am changeaagain
$ hg up 0
1 files updated, 0 files merged, 0 files removed, 0 files unresolved
$ echo b > b
$ hg ci -Am addb
adding b

We want 2 to depend on 1 and 3. Since 3 is always converted after 2,
the bug should be exhibited with all conversion orders.

$ cat > ../splicemap <<EOF
> `(hg id -r 2 -i --debug)` `(hg id -r 1 -i --debug)`, `(hg id -r 3 -i --debug)`
> EOF
$ cd ..
$ cat splicemap
7c364e7fa7d70ae525610c016317ed717b519d97 717d54d67e6c31fd75ffef2ff3042bdd98418437, 102a90ea7b4a3361e4082ed620918c261189a36a

Test regular conversion

$ hg convert --splicemap splicemap ordered ordered-hg1
initializing destination ordered-hg1 repository
scanning source...
sorting...
converting...
3 adda
2 changea
1 addb
0 changeaagain
spliced in ['717d54d67e6c31fd75ffef2ff3042bdd98418437', '102a90ea7b4a3361e4082ed620918c261189a36a'] as parents of 7c364e7fa7d70ae525610c016317ed717b519d97
$ glog -R ordered-hg1
o 3:4cb04b9afbf2 "changeaagain" files: a
|\
| o 2:102a90ea7b4a "addb" files: b
| |
o | 1:717d54d67e6c "changea" files: a
|/
o 0:07f494440405 "adda" files: a

Test conversion with parent revisions already in dest, using source
and destination identifiers. Test unknown splicemap target.

$ hg convert -r1 ordered ordered-hg2
initializing destination ordered-hg2 repository
scanning source...
sorting...
converting...
1 adda
0 changea
$ hg convert -r3 ordered ordered-hg2
scanning source...
sorting...
converting...
0 addb
$ cat > splicemap <<EOF
> `(hg -R ordered id -r 2 -i --debug)` \
> `(hg -R ordered-hg2 id -r 1 -i --debug)`,\
> `(hg -R ordered-hg2 id -r 2 -i --debug)`
> deadbeef102a90ea7b4a3361e4082ed620918c26 deadbeef102a90ea7b4a3361e4082ed620918c27
> EOF
$ hg convert --splicemap splicemap ordered ordered-hg2
scanning source...
splice map revision deadbeef102a90ea7b4a3361e4082ed620918c26 is not being converted, ignoring
sorting...
converting...
0 changeaagain
spliced in ['717d54d67e6c31fd75ffef2ff3042bdd98418437', '102a90ea7b4a3361e4082ed620918c261189a36a'] as parents of 7c364e7fa7d70ae525610c016317ed717b519d97
$ glog -R ordered-hg2
o 3:4cb04b9afbf2 "changeaagain" files: a
|\
| o 2:102a90ea7b4a "addb" files: b
| |
o | 1:717d54d67e6c "changea" files: a
|/
o 0:07f494440405 "adda" files: a

Test empty conversion

$ hg convert --splicemap splicemap ordered ordered-hg2
scanning source...
splice map revision deadbeef102a90ea7b4a3361e4082ed620918c26 is not being converted, ignoring
sorting...
converting...

Test clonebranches

$ hg --config convert.hg.clonebranches=true convert \
> --splicemap splicemap ordered ordered-hg3
initializing destination ordered-hg3 repository
scanning source...
abort: revision 717d54d67e6c31fd75ffef2ff3042bdd98418437 not found in destination repository (lookups with clonebranches=true are not implemented)
[255]

Test invalid dependency

$ cat > splicemap <<EOF
> `(hg -R ordered id -r 2 -i --debug)` \
> deadbeef102a90ea7b4a3361e4082ed620918c26,\
> `(hg -R ordered-hg2 id -r 2 -i --debug)`
> EOF
$ hg convert --splicemap splicemap ordered ordered-hg4
initializing destination ordered-hg4 repository
scanning source...
abort: unknown splice map parent: deadbeef102a90ea7b4a3361e4082ed620918c26
[255]