convert: add support for specifying multiple revs...
Durham Goode
r25748:baea47ca default
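The change itself is mechanical: the -r/--rev flag becomes a list option and the converter_source base class stores revs instead of a single rev, so every backend now receives all of the -r values the user supplied. An illustrative invocation of the new form (repository names are made up; only backends that accept several revisions can act on more than one entry, while single-revision backends such as the bzr source below keep reading revs[0]):

    $ hg convert -r stable -r default original original-hg
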
@@ -1,434 +1,434 b''
1 1 # convert.py Foreign SCM converter
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 '''import revisions from foreign VCS repositories into Mercurial'''
9 9
10 10 import convcmd
11 11 import cvsps
12 12 import subversion
13 13 from mercurial import cmdutil, templatekw
14 14 from mercurial.i18n import _
15 15
16 16 cmdtable = {}
17 17 command = cmdutil.command(cmdtable)
18 18 # Note for extension authors: ONLY specify testedwith = 'internal' for
19 19 # extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
20 20 # be specifying the version(s) of Mercurial they are tested with, or
21 21 # leave the attribute unspecified.
22 22 testedwith = 'internal'
23 23
24 24 # Commands definition was moved elsewhere to ease demandload job.
25 25
26 26 @command('convert',
27 27 [('', 'authors', '',
28 28 _('username mapping filename (DEPRECATED, use --authormap instead)'),
29 29 _('FILE')),
30 30 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
31 31 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
32 ('r', 'rev', '', _('import up to source revision REV'), _('REV')),
32 ('r', 'rev', [], _('import up to source revision REV'), _('REV')),
33 33 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
34 34 ('', 'filemap', '', _('remap file names using contents of file'),
35 35 _('FILE')),
36 36 ('', 'full', None,
37 37 _('apply filemap changes by converting all files again')),
38 38 ('', 'splicemap', '', _('splice synthesized history into place'),
39 39 _('FILE')),
40 40 ('', 'branchmap', '', _('change branch names while converting'),
41 41 _('FILE')),
42 42 ('', 'branchsort', None, _('try to sort changesets by branches')),
43 43 ('', 'datesort', None, _('try to sort changesets by date')),
44 44 ('', 'sourcesort', None, _('preserve source changesets order')),
45 45 ('', 'closesort', None, _('try to reorder closed revisions'))],
46 46 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
47 47 norepo=True)
48 48 def convert(ui, src, dest=None, revmapfile=None, **opts):
49 49 """convert a foreign SCM repository to a Mercurial one.
50 50
51 51 Accepted source formats [identifiers]:
52 52
53 53 - Mercurial [hg]
54 54 - CVS [cvs]
55 55 - Darcs [darcs]
56 56 - git [git]
57 57 - Subversion [svn]
58 58 - Monotone [mtn]
59 59 - GNU Arch [gnuarch]
60 60 - Bazaar [bzr]
61 61 - Perforce [p4]
62 62
63 63 Accepted destination formats [identifiers]:
64 64
65 65 - Mercurial [hg]
66 66 - Subversion [svn] (history on branches is not preserved)
67 67
68 68 If no revision is given, all revisions will be converted.
69 69 Otherwise, convert will only import up to the named revision
70 70 (given in a format understood by the source).
71 71
72 72 If no destination directory name is specified, it defaults to the
73 73 basename of the source with ``-hg`` appended. If the destination
74 74 repository doesn't exist, it will be created.
75 75
76 76 By default, all sources except Mercurial will use --branchsort.
77 77 Mercurial uses --sourcesort to preserve original revision numbers
78 78 order. Sort modes have the following effects:
79 79
80 80 --branchsort convert from parent to child revision when possible,
81 81 which means branches are usually converted one after
82 82 the other. It generates more compact repositories.
83 83
84 84 --datesort sort revisions by date. Converted repositories have
85 85 good-looking changelogs but are often an order of
86 86 magnitude larger than the same ones generated by
87 87 --branchsort.
88 88
89 89 --sourcesort try to preserve source revisions order, only
90 90 supported by Mercurial sources.
91 91
92 92 --closesort try to move closed revisions as close as possible
93 93 to parent branches, only supported by Mercurial
94 94 sources.
95 95
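For example, favoring chronological changelogs over repository compactness is a single flag at conversion time (paths are illustrative)::

    $ hg convert --datesort /path/to/cvs-checkout converted-hg
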
96 96 If ``REVMAP`` isn't given, it will be put in a default location
97 97 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
98 98 text file that maps each source commit ID to the destination ID
99 99 for that revision, like so::
100 100
101 101 <source ID> <destination ID>
102 102
103 103 If the file doesn't exist, it's automatically created. It's
104 104 updated on each commit copied, so :hg:`convert` can be interrupted
105 105 and can be run repeatedly to copy new commits.
106 106
107 107 The authormap is a simple text file that maps each source commit
108 108 author to a destination commit author. It is handy for source SCMs
109 109 that use unix logins to identify authors (e.g.: CVS). One line per
110 110 author mapping and the line format is::
111 111
112 112 source author = destination author
113 113
114 114 Empty lines and lines starting with a ``#`` are ignored.
115 115
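A short illustrative authormap (names and addresses are invented for the example)::

    # map CVS logins to full identities
    jdoe = John Doe <jdoe@example.com>
    msmith = Mary Smith <msmith@example.com>
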
116 116 The filemap is a file that allows filtering and remapping of files
117 117 and directories. Each line can contain one of the following
118 118 directives::
119 119
120 120 include path/to/file-or-dir
121 121
122 122 exclude path/to/file-or-dir
123 123
124 124 rename path/to/source path/to/destination
125 125
126 126 Comment lines start with ``#``. A specified path matches if it
127 127 equals the full relative name of a file or one of its parent
128 128 directories. The ``include`` or ``exclude`` directive with the
129 129 longest matching path applies, so line order does not matter.
130 130
131 131 The ``include`` directive causes a file, or all files under a
132 132 directory, to be included in the destination repository. The default
133 133 if there are no ``include`` statements is to include everything.
134 134 If there are any ``include`` statements, nothing else is included.
135 135 The ``exclude`` directive causes files or directories to
136 136 be omitted. The ``rename`` directive renames a file or directory if
137 137 it is converted. To rename from a subdirectory into the root of
138 138 the repository, use ``.`` as the path to rename to.
139 139
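Putting the directives together, an illustrative filemap that keeps only ``src``, drops its tests and hoists the remainder to the repository root would read::

    include src
    exclude src/tests
    rename src .
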
140 140 ``--full`` will make sure the converted changesets contain exactly
141 141 the right files with the right content. It will make a full
142 142 conversion of all files, not just the ones that have
143 143 changed. Files that already are correct will not be changed. This
144 144 can be used to apply filemap changes when converting
145 145 incrementally. This is currently only supported for Mercurial and
146 146 Subversion.
147 147
148 148 The splicemap is a file that allows insertion of synthetic
149 149 history, letting you specify the parents of a revision. This is
150 150 useful if you want to e.g. give a Subversion merge two parents, or
151 151 graft two disconnected series of history together. Each entry
152 152 contains a key, followed by a space, followed by one or two
153 153 comma-separated values::
154 154
155 155 key parent1, parent2
156 156
157 157 The key is the revision ID in the source
158 158 revision control system whose parents should be modified (same
159 159 format as a key in .hg/shamap). The values are the revision IDs
160 160 (in either the source or destination revision control system) that
161 161 should be used as the new parents for that node. For example, if
162 162 you have merged "release-1.0" into "trunk", then you should
163 163 specify the revision on "trunk" as the first parent and the one on
164 164 the "release-1.0" branch as the second.
165 165
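Written out with placeholders instead of real identifiers, the "release-1.0" example above becomes a single splicemap line::

    <ID of the merge revision> <ID of the trunk parent>, <ID of the release-1.0 parent>
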
166 166 The branchmap is a file that allows you to rename a branch when it is
167 167 being brought in from whatever external repository. When used in
168 168 conjunction with a splicemap, it allows for a powerful combination
169 169 to help fix even the most badly mismanaged repositories and turn them
170 170 into nicely structured Mercurial repositories. The branchmap contains
171 171 lines of the form::
172 172
173 173 original_branch_name new_branch_name
174 174
175 175 where "original_branch_name" is the name of the branch in the
176 176 source repository, and "new_branch_name" is the name of the branch
177 177 in the destination repository. No whitespace is allowed in the
178 178 branch names. This can be used to (for instance) move code in one
179 179 repository from "default" to a named branch.
180 180
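For instance, the "default to named branch" move mentioned above needs only one line (the new branch name is illustrative)::

    default imported-legacy
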
181 181 Mercurial Source
182 182 ################
183 183
184 184 The Mercurial source recognizes the following configuration
185 185 options, which you can set on the command line with ``--config``:
186 186
187 187 :convert.hg.ignoreerrors: ignore integrity errors when reading.
188 188 Use it to fix Mercurial repositories with missing revlogs, by
189 189 converting from and to Mercurial. Default is False.
190 190
191 191 :convert.hg.saverev: store original revision ID in changeset
192 192 (forces target IDs to change). It takes a boolean argument and
193 193 defaults to False.
194 194
195 195 :convert.hg.revs: revset specifying the source revisions to convert.
196 196
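Like the other options, ``convert.hg.revs`` can be supplied inline; for example, to convert only the default branch of a Mercurial source (paths are illustrative)::

    $ hg convert --config convert.hg.revs='branch(default)' src src-hg
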
197 197 CVS Source
198 198 ##########
199 199
200 200 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
201 201 to indicate the starting point of what will be converted. Direct
202 202 access to the repository files is not needed, unless of course the
203 203 repository is ``:local:``. The conversion uses the top level
204 204 directory in the sandbox to find the CVS repository, and then uses
205 205 CVS rlog commands to find files to convert. This means that unless
206 206 a filemap is given, all files under the starting directory will be
207 207 converted, and that any directory reorganization in the CVS
208 208 sandbox is ignored.
209 209
210 210 The following options can be used with ``--config``:
211 211
212 212 :convert.cvsps.cache: Set to False to disable remote log caching,
213 213 for testing and debugging purposes. Default is True.
214 214
215 215 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
216 216 allowed between commits with identical user and log message in
217 217 a single changeset. When very large files were checked in as
218 218 part of a changeset then the default may not be long enough.
219 219 The default is 60.
220 220
221 221 :convert.cvsps.mergeto: Specify a regular expression to which
222 222 commit log messages are matched. If a match occurs, then the
223 223 conversion process will insert a dummy revision merging the
224 224 branch on which this log message occurs to the branch
225 225 indicated in the regex. Default is ``{{mergetobranch
226 226 ([-\\w]+)}}``
227 227
228 228 :convert.cvsps.mergefrom: Specify a regular expression to which
229 229 commit log messages are matched. If a match occurs, then the
230 230 conversion process will add the most recent revision on the
231 231 branch indicated in the regex as the second parent of the
232 232 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
233 233
234 234 :convert.localtimezone: use local time (as determined by the TZ
235 235 environment variable) for changeset date/times. The default
236 236 is False (use UTC).
237 237
238 238 :hooks.cvslog: Specify a Python function to be called at the end of
239 239 gathering the CVS log. The function is passed a list with the
240 240 log entries, and can modify the entries in-place, or add or
241 241 delete them.
242 242
243 243 :hooks.cvschangesets: Specify a Python function to be called after
244 244 the changesets are calculated from the CVS log. The
245 245 function is passed a list with the changeset entries, and can
246 246 modify the changesets in-place, or add or delete them.
247 247
248 248 An additional "debugcvsps" Mercurial command allows the builtin
249 249 changeset merging code to be run without doing a conversion. Its
250 250 parameters and output are similar to those of cvsps 2.1. Please see
251 251 the command help for more details.
252 252
253 253 Subversion Source
254 254 #################
255 255
256 256 Subversion source detects classical trunk/branches/tags layouts.
257 257 By default, the supplied ``svn://repo/path/`` source URL is
258 258 converted as a single branch. If ``svn://repo/path/trunk`` exists
259 259 it replaces the default branch. If ``svn://repo/path/branches``
260 260 exists, its subdirectories are listed as possible branches. If
261 261 ``svn://repo/path/tags`` exists, it is scanned for tags referencing
262 262 converted branches. Default ``trunk``, ``branches`` and ``tags``
263 263 values can be overridden with the following options. Set them to paths
264 264 relative to the source URL, or leave them blank to disable auto
265 265 detection.
266 266
267 267 The following options can be set with ``--config``:
268 268
269 269 :convert.svn.branches: specify the directory containing branches.
270 270 The default is ``branches``.
271 271
272 272 :convert.svn.tags: specify the directory containing tags. The
273 273 default is ``tags``.
274 274
275 275 :convert.svn.trunk: specify the name of the trunk branch. The
276 276 default is ``trunk``.
277 277
278 278 :convert.localtimezone: use local time (as determined by the TZ
279 279 environment variable) for changeset date/times. The default
280 280 is False (use UTC).
281 281
282 282 Source history can be retrieved starting at a specific revision,
283 283 instead of being integrally converted. Only single branch
284 284 conversions are supported.
285 285
286 286 :convert.svn.startrev: specify start Subversion revision number.
287 287 The default is 0.
288 288
289 289 Git Source
290 290 ##########
291 291
292 292 The Git importer converts commits from all reachable branches (refs
293 293 in refs/heads) and remotes (refs in refs/remotes) to Mercurial.
294 294 Branches are converted to bookmarks with the same name, with the
295 295 leading 'refs/heads' stripped. Git submodules are converted to Git
296 296 subrepos in Mercurial.
297 297
298 298 The following options can be set with ``--config``:
299 299
300 300 :convert.git.similarity: specify how similar files modified in a
301 301 commit must be to be imported as renames or copies, as a
302 302 percentage between ``0`` (disabled) and ``100`` (files must be
303 303 identical). For example, ``90`` means that a delete/add pair will
304 304 be imported as a rename if more than 90% of the file hasn't
305 305 changed. The default is ``50``.
306 306
307 307 :convert.git.findcopiesharder: while detecting copies, look at all
308 308 files in the working copy instead of just changed ones. This
309 309 is very expensive for large projects, and is only effective when
310 310 ``convert.git.similarity`` is greater than 0. The default is False.
311 311
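A conversion that asks the Git source for aggressive rename detection could therefore look like this (repository names are illustrative)::

    $ hg convert --config convert.git.similarity=90 \
                 --config convert.git.findcopiesharder=True project.git project-hg
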
312 312 Perforce Source
313 313 ###############
314 314
315 315 The Perforce (P4) importer can be given a p4 depot path or a
316 316 client specification as source. It will convert all files in the
317 317 source to a flat Mercurial repository, ignoring labels, branches
318 318 and integrations. Note that when a depot path is given you should
319 319 usually specify a target directory, because otherwise the
320 320 target may be named ``...-hg``.
321 321
322 322 It is possible to limit the amount of source history to be
323 323 converted by specifying an initial Perforce revision:
324 324
325 325 :convert.p4.startrev: specify initial Perforce revision (a
326 326 Perforce changelist number).
327 327
328 328 Mercurial Destination
329 329 #####################
330 330
331 331 The Mercurial destination will recognize Mercurial subrepositories in the
332 332 destination directory, and update the .hgsubstate file automatically if the
333 333 destination subrepositories contain the <dest>/<sub>/.hg/shamap file.
334 334 Converting a repository with subrepositories requires converting a single
335 335 repository at a time, from the bottom up.
336 336
337 337 .. container:: verbose
338 338
339 339 An example showing how to convert a repository with subrepositories::
340 340
341 341 # so convert knows the type when it sees a non-empty destination
342 342 $ hg init converted
343 343
344 344 $ hg convert orig/sub1 converted/sub1
345 345 $ hg convert orig/sub2 converted/sub2
346 346 $ hg convert orig converted
347 347
348 348 The following options are supported:
349 349
350 350 :convert.hg.clonebranches: dispatch source branches in separate
351 351 clones. The default is False.
352 352
353 353 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
354 354 ``default``.
355 355
356 356 :convert.hg.usebranchnames: preserve branch names. The default is
357 357 True.
358 358
359 359 All Destinations
360 360 ################
361 361
362 362 All destination types accept the following options:
363 363
364 364 :convert.skiptags: does not convert tags from the source repo to the target
365 365 repo. The default is False.
366 366 """
367 367 return convcmd.convert(ui, src, dest, revmapfile, **opts)
368 368
369 369 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
370 370 def debugsvnlog(ui, **opts):
371 371 return subversion.debugsvnlog(ui, **opts)
372 372
373 373 @command('debugcvsps',
374 374 [
375 375 # Main options shared with cvsps-2.1
376 376 ('b', 'branches', [], _('only return changes on specified branches')),
377 377 ('p', 'prefix', '', _('prefix to remove from file names')),
378 378 ('r', 'revisions', [],
379 379 _('only return changes after or between specified tags')),
380 380 ('u', 'update-cache', None, _("update cvs log cache")),
381 381 ('x', 'new-cache', None, _("create new cvs log cache")),
382 382 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
383 383 ('', 'root', '', _('specify cvsroot')),
384 384 # Options specific to builtin cvsps
385 385 ('', 'parents', '', _('show parent changesets')),
386 386 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
387 387 # Options that are ignored for compatibility with cvsps-2.1
388 388 ('A', 'cvs-direct', None, _('ignored for compatibility')),
389 389 ],
390 390 _('hg debugcvsps [OPTION]... [PATH]...'),
391 391 norepo=True)
392 392 def debugcvsps(ui, *args, **opts):
393 393 '''create changeset information from CVS
394 394
395 395 This command is intended as a debugging tool for the CVS to
396 396 Mercurial converter, and can be used as a direct replacement for
397 397 cvsps.
398 398
399 399 Hg debugcvsps reads the CVS rlog for the current directory (or any
400 400 named directory) in the CVS repository, and converts the log to a
401 401 series of changesets based on matching commit log entries and
402 402 dates.'''
403 403 return cvsps.debugcvsps(ui, *args, **opts)
404 404
405 405 def kwconverted(ctx, name):
406 406 rev = ctx.extra().get('convert_revision', '')
407 407 if rev.startswith('svn:'):
408 408 if name == 'svnrev':
409 409 return str(subversion.revsplit(rev)[2])
410 410 elif name == 'svnpath':
411 411 return subversion.revsplit(rev)[1]
412 412 elif name == 'svnuuid':
413 413 return subversion.revsplit(rev)[0]
414 414 return rev
415 415
416 416 def kwsvnrev(repo, ctx, **args):
417 417 """:svnrev: String. Converted subversion revision number."""
418 418 return kwconverted(ctx, 'svnrev')
419 419
420 420 def kwsvnpath(repo, ctx, **args):
421 421 """:svnpath: String. Converted subversion revision project path."""
422 422 return kwconverted(ctx, 'svnpath')
423 423
424 424 def kwsvnuuid(repo, ctx, **args):
425 425 """:svnuuid: String. Converted subversion revision repository identifier."""
426 426 return kwconverted(ctx, 'svnuuid')
427 427
428 428 def extsetup(ui):
429 429 templatekw.keywords['svnrev'] = kwsvnrev
430 430 templatekw.keywords['svnpath'] = kwsvnpath
431 431 templatekw.keywords['svnuuid'] = kwsvnuuid
432 432
433 433 # tell hggettext to extract docstrings from these functions:
434 434 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
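The three keyword functions above are registered for every template once the extension is loaded; provided a conversion recorded the original Subversion identifier in the ``convert_revision`` extra, they can be queried directly, for example::

    $ hg log -r . --template '{svnrev} {svnpath} {svnuuid}\n'
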
@@ -1,286 +1,286 b''
1 1 # bzr.py - bzr support for the convert extension
2 2 #
3 3 # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # This module is for handling 'bzr', that was formerly known as Bazaar-NG;
9 9 # it cannot access 'bar' repositories, but they were never used very much
10 10
11 11 import os
12 12 from mercurial import demandimport
13 13 # these do not work with demandimport, blacklist
14 14 demandimport.ignore.extend([
15 15 'bzrlib.transactions',
16 16 'bzrlib.urlutils',
17 17 'ElementPath',
18 18 ])
19 19
20 20 from mercurial.i18n import _
21 21 from mercurial import util
22 22 from common import NoRepo, commit, converter_source
23 23
24 24 try:
25 25 # bazaar imports
26 26 from bzrlib import bzrdir, revision, errors
27 27 from bzrlib.revisionspec import RevisionSpec
28 28 except ImportError:
29 29 pass
30 30
31 31 supportedkinds = ('file', 'symlink')
32 32
33 33 class bzr_source(converter_source):
34 34 """Reads Bazaar repositories by using the Bazaar Python libraries"""
35 35
36 def __init__(self, ui, path, rev=None):
37 super(bzr_source, self).__init__(ui, path, rev=rev)
36 def __init__(self, ui, path, revs=None):
37 super(bzr_source, self).__init__(ui, path, revs=revs)
38 38
39 39 if not os.path.exists(os.path.join(path, '.bzr')):
40 40 raise NoRepo(_('%s does not look like a Bazaar repository')
41 41 % path)
42 42
43 43 try:
44 44 # access bzrlib stuff
45 45 bzrdir
46 46 except NameError:
47 47 raise NoRepo(_('Bazaar modules could not be loaded'))
48 48
49 49 path = os.path.abspath(path)
50 50 self._checkrepotype(path)
51 51 try:
52 52 self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
53 53 except errors.NoRepositoryPresent:
54 54 raise NoRepo(_('%s does not look like a Bazaar repository')
55 55 % path)
56 56 self._parentids = {}
57 57
58 58 def _checkrepotype(self, path):
59 59 # Lightweight checkouts detection is informational but probably
60 60 # fragile at API level. It should not terminate the conversion.
61 61 try:
62 62 from bzrlib import bzrdir
63 63 dir = bzrdir.BzrDir.open_containing(path)[0]
64 64 try:
65 65 tree = dir.open_workingtree(recommend_upgrade=False)
66 66 branch = tree.branch
67 67 except (errors.NoWorkingTree, errors.NotLocalUrl):
68 68 tree = None
69 69 branch = dir.open_branch()
70 70 if (tree is not None and tree.bzrdir.root_transport.base !=
71 71 branch.bzrdir.root_transport.base):
72 72 self.ui.warn(_('warning: lightweight checkouts may cause '
73 73 'conversion failures, try with a regular '
74 74 'branch instead.\n'))
75 75 except Exception:
76 76 self.ui.note(_('bzr source type could not be determined\n'))
77 77
78 78 def before(self):
79 79 """Before the conversion begins, acquire a read lock
80 80 for all the operations that might need it. Fortunately
81 81 read locks don't block other reads or writes to the
82 82 repository, so this shouldn't have any impact on the usage of
83 83 the source repository.
84 84
85 85 The alternative would be locking on every operation that
86 86 needs locks (there are currently two: getting the file and
87 87 getting the parent map) and releasing immediately after,
88 88 but this approach can take even 40% longer."""
89 89 self.sourcerepo.lock_read()
90 90
91 91 def after(self):
92 92 self.sourcerepo.unlock()
93 93
94 94 def _bzrbranches(self):
95 95 return self.sourcerepo.find_branches(using=True)
96 96
97 97 def getheads(self):
98 if not self.rev:
98 if not self.revs:
99 99 # Set using=True to avoid nested repositories (see issue3254)
100 100 heads = sorted([b.last_revision() for b in self._bzrbranches()])
101 101 else:
102 102 revid = None
103 103 for branch in self._bzrbranches():
104 104 try:
105 r = RevisionSpec.from_string(self.rev)
105 r = RevisionSpec.from_string(self.revs[0])
106 106 info = r.in_history(branch)
107 107 except errors.BzrError:
108 108 continue
109 109 revid = info.rev_id
110 110 if revid is None:
111 raise util.Abort(_('%s is not a valid revision') % self.rev)
111 raise util.Abort(_('%s is not a valid revision') % self.revs[0])
112 112 heads = [revid]
113 113 # Empty repositories return 'null:', which cannot be retrieved
114 114 heads = [h for h in heads if h != 'null:']
115 115 return heads
116 116
117 117 def getfile(self, name, rev):
118 118 revtree = self.sourcerepo.revision_tree(rev)
119 119 fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
120 120 kind = None
121 121 if fileid is not None:
122 122 kind = revtree.kind(fileid)
123 123 if kind not in supportedkinds:
124 124 # the file is not available anymore - was deleted
125 125 return None, None
126 126 mode = self._modecache[(name, rev)]
127 127 if kind == 'symlink':
128 128 target = revtree.get_symlink_target(fileid)
129 129 if target is None:
130 130 raise util.Abort(_('%s.%s symlink has no target')
131 131 % (name, rev))
132 132 return target, mode
133 133 else:
134 134 sio = revtree.get_file(fileid)
135 135 return sio.read(), mode
136 136
137 137 def getchanges(self, version, full):
138 138 if full:
139 139 raise util.Abort(_("convert from bzr does not support --full"))
140 140 self._modecache = {}
141 141 self._revtree = self.sourcerepo.revision_tree(version)
142 142 # get the parentids from the cache
143 143 parentids = self._parentids.pop(version)
144 144 # only diff against first parent id
145 145 prevtree = self.sourcerepo.revision_tree(parentids[0])
146 146 files, changes = self._gettreechanges(self._revtree, prevtree)
147 147 return files, changes, set()
148 148
149 149 def getcommit(self, version):
150 150 rev = self.sourcerepo.get_revision(version)
151 151 # populate parent id cache
152 152 if not rev.parent_ids:
153 153 parents = []
154 154 self._parentids[version] = (revision.NULL_REVISION,)
155 155 else:
156 156 parents = self._filterghosts(rev.parent_ids)
157 157 self._parentids[version] = parents
158 158
159 159 branch = self.recode(rev.properties.get('branch-nick', u'default'))
160 160 if branch == 'trunk':
161 161 branch = 'default'
162 162 return commit(parents=parents,
163 163 date='%d %d' % (rev.timestamp, -rev.timezone),
164 164 author=self.recode(rev.committer),
165 165 desc=self.recode(rev.message),
166 166 branch=branch,
167 167 rev=version)
168 168
169 169 def gettags(self):
170 170 bytetags = {}
171 171 for branch in self._bzrbranches():
172 172 if not branch.supports_tags():
173 173 return {}
174 174 tagdict = branch.tags.get_tag_dict()
175 175 for name, rev in tagdict.iteritems():
176 176 bytetags[self.recode(name)] = rev
177 177 return bytetags
178 178
179 179 def getchangedfiles(self, rev, i):
180 180 self._modecache = {}
181 181 curtree = self.sourcerepo.revision_tree(rev)
182 182 if i is not None:
183 183 parentid = self._parentids[rev][i]
184 184 else:
185 185 # no parent id, get the empty revision
186 186 parentid = revision.NULL_REVISION
187 187
188 188 prevtree = self.sourcerepo.revision_tree(parentid)
189 189 changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
190 190 return changes
191 191
192 192 def _gettreechanges(self, current, origin):
193 193 revid = current._revision_id
194 194 changes = []
195 195 renames = {}
196 196 seen = set()
197 197 # Process the entries by reverse lexicographic name order to
198 198 # handle nested renames correctly, most specific first.
199 199 curchanges = sorted(current.iter_changes(origin),
200 200 key=lambda c: c[1][0] or c[1][1],
201 201 reverse=True)
202 202 for (fileid, paths, changed_content, versioned, parent, name,
203 203 kind, executable) in curchanges:
204 204
205 205 if paths[0] == u'' or paths[1] == u'':
206 206 # ignore changes to tree root
207 207 continue
208 208
209 209 # bazaar tracks directories, mercurial does not, so
210 210 # we have to rename the directory contents
211 211 if kind[1] == 'directory':
212 212 if kind[0] not in (None, 'directory'):
213 213 # Replacing 'something' with a directory, record it
214 214 # so it can be removed.
215 215 changes.append((self.recode(paths[0]), revid))
216 216
217 217 if kind[0] == 'directory' and None not in paths:
218 218 renaming = paths[0] != paths[1]
219 219 # neither an add nor a delete - a move
220 220 # rename all directory contents manually
221 221 subdir = origin.inventory.path2id(paths[0])
222 222 # get all child-entries of the directory
223 223 for name, entry in origin.inventory.iter_entries(subdir):
224 224 # hg does not track directory renames
225 225 if entry.kind == 'directory':
226 226 continue
227 227 frompath = self.recode(paths[0] + '/' + name)
228 228 if frompath in seen:
229 229 # Already handled by a more specific change entry
230 230 # This is important when you have:
231 231 # a => b
232 232 # a/c => a/c
233 233 # Here a/c must not be renamed into b/c
234 234 continue
235 235 seen.add(frompath)
236 236 if not renaming:
237 237 continue
238 238 topath = self.recode(paths[1] + '/' + name)
239 239 # register the files as changed
240 240 changes.append((frompath, revid))
241 241 changes.append((topath, revid))
242 242 # add to mode cache
243 243 mode = ((entry.executable and 'x')
244 244 or (entry.kind == 'symlink' and 's')
245 245 or '')
246 246 self._modecache[(topath, revid)] = mode
247 247 # register the change as move
248 248 renames[topath] = frompath
249 249
250 250 # no further changes, go to the next change
251 251 continue
252 252
253 253 # we got unicode paths, need to convert them
254 254 path, topath = paths
255 255 if path is not None:
256 256 path = self.recode(path)
257 257 if topath is not None:
258 258 topath = self.recode(topath)
259 259 seen.add(path or topath)
260 260
261 261 if topath is None:
262 262 # file deleted
263 263 changes.append((path, revid))
264 264 continue
265 265
266 266 # renamed
267 267 if path and path != topath:
268 268 renames[topath] = path
269 269 changes.append((path, revid))
270 270
271 271 # populate the mode cache
272 272 kind, executable = [e[1] for e in (kind, executable)]
273 273 mode = ((executable and 'x') or (kind == 'symlink' and 'l')
274 274 or '')
275 275 self._modecache[(topath, revid)] = mode
276 276 changes.append((topath, revid))
277 277
278 278 return changes, renames
279 279
280 280 def _filterghosts(self, ids):
281 281 """Filters out ghost revisions which hg does not support, see
282 282 <http://bazaar-vcs.org/GhostRevision>
283 283 """
284 284 parentmap = self.sourcerepo.get_parent_map(ids)
285 285 parents = tuple([parent for parent in ids if parent in parentmap])
286 286 return parents
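Because revs is now a list, a backend that understands only one revision has to choose between quietly using revs[0], as the bzr code above does, and rejecting extra values outright. The following is a sketch of the stricter alternative and is not part of this change; the function name and error type are invented for illustration:

    def single_rev_or_none(revs):
        """Return the only requested revision, or None when -r was not given."""
        if not revs:
            return None
        if len(revs) > 1:
            # hypothetical guard: this backend honors at most one --rev
            raise ValueError('this source supports at most one --rev')
        return revs[0]
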
@@ -1,471 +1,471 b''
1 1 # common.py - common code for the convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import base64, errno, subprocess, os, datetime, re
9 9 import cPickle as pickle
10 10 from mercurial import phases, util
11 11 from mercurial.i18n import _
12 12
13 13 propertycache = util.propertycache
14 14
15 15 def encodeargs(args):
16 16 def encodearg(s):
17 17 lines = base64.encodestring(s)
18 18 lines = [l.splitlines()[0] for l in lines]
19 19 return ''.join(lines)
20 20
21 21 s = pickle.dumps(args)
22 22 return encodearg(s)
23 23
24 24 def decodeargs(s):
25 25 s = base64.decodestring(s)
26 26 return pickle.loads(s)
27 27
28 28 class MissingTool(Exception):
29 29 pass
30 30
31 31 def checktool(exe, name=None, abort=True):
32 32 name = name or exe
33 33 if not util.findexe(exe):
34 34 if abort:
35 35 exc = util.Abort
36 36 else:
37 37 exc = MissingTool
38 38 raise exc(_('cannot find required "%s" tool') % name)
39 39
40 40 class NoRepo(Exception):
41 41 pass
42 42
43 43 SKIPREV = 'SKIP'
44 44
45 45 class commit(object):
46 46 def __init__(self, author, date, desc, parents, branch=None, rev=None,
47 47 extra={}, sortkey=None, saverev=True, phase=phases.draft):
48 48 self.author = author or 'unknown'
49 49 self.date = date or '0 0'
50 50 self.desc = desc
51 51 self.parents = parents
52 52 self.branch = branch
53 53 self.rev = rev
54 54 self.extra = extra
55 55 self.sortkey = sortkey
56 56 self.saverev = saverev
57 57 self.phase = phase
58 58
59 59 class converter_source(object):
60 60 """Conversion source interface"""
61 61
62 def __init__(self, ui, path=None, rev=None):
62 def __init__(self, ui, path=None, revs=None):
63 63 """Initialize conversion source (or raise NoRepo("message")
64 64 exception if path is not a valid repository)"""
65 65 self.ui = ui
66 66 self.path = path
67 self.rev = rev
67 self.revs = revs
68 68
69 69 self.encoding = 'utf-8'
70 70
71 71 def checkhexformat(self, revstr, mapname='splicemap'):
72 72 """ fails if revstr is not a 40 byte hex. mercurial and git both uses
73 73 such format for their revision numbering
74 74 """
75 75 if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
76 76 raise util.Abort(_('%s entry %s is not a valid revision'
77 77 ' identifier') % (mapname, revstr))
78 78
79 79 def before(self):
80 80 pass
81 81
82 82 def after(self):
83 83 pass
84 84
85 85 def setrevmap(self, revmap):
86 86 """set the map of already-converted revisions"""
87 87 pass
88 88
89 89 def getheads(self):
90 90 """Return a list of this repository's heads"""
91 91 raise NotImplementedError
92 92
93 93 def getfile(self, name, rev):
94 94 """Return a pair (data, mode) where data is the file content
95 95 as a string and mode one of '', 'x' or 'l'. rev is the
96 96 identifier returned by a previous call to getchanges().
97 97 Data is None if file is missing/deleted in rev.
98 98 """
99 99 raise NotImplementedError
100 100
101 101 def getchanges(self, version, full):
102 102 """Returns a tuple of (files, copies, cleanp2).
103 103
104 104 files is a sorted list of (filename, id) tuples for all files
105 105 changed between version and its first parent returned by
106 106 getcommit(). If full, all files in that revision are returned.
107 107 id is the source revision id of the file.
108 108
109 109 copies is a dictionary of dest: source
110 110
111 111 cleanp2 is the set of filenames that are clean against p2.
112 112 (Files that are clean against p1 are already not in files (unless
113 113 full). This makes it possible to handle p2 clean files similarly.)
114 114 """
115 115 raise NotImplementedError
116 116
117 117 def getcommit(self, version):
118 118 """Return the commit object for version"""
119 119 raise NotImplementedError
120 120
121 121 def numcommits(self):
122 122 """Return the number of commits in this source.
123 123
124 124 If unknown, return None.
125 125 """
126 126 return None
127 127
128 128 def gettags(self):
129 129 """Return the tags as a dictionary of name: revision
130 130
131 131 Tag names must be UTF-8 strings.
132 132 """
133 133 raise NotImplementedError
134 134
135 135 def recode(self, s, encoding=None):
136 136 if not encoding:
137 137 encoding = self.encoding or 'utf-8'
138 138
139 139 if isinstance(s, unicode):
140 140 return s.encode("utf-8")
141 141 try:
142 142 return s.decode(encoding).encode("utf-8")
143 143 except UnicodeError:
144 144 try:
145 145 return s.decode("latin-1").encode("utf-8")
146 146 except UnicodeError:
147 147 return s.decode(encoding, "replace").encode("utf-8")
148 148
149 149 def getchangedfiles(self, rev, i):
150 150 """Return the files changed by rev compared to parent[i].
151 151
152 152 i is an index selecting one of the parents of rev. The return
153 153 value should be the list of files that are different in rev and
154 154 this parent.
155 155
156 156 If rev has no parents, i is None.
157 157
158 158 This function is only needed to support --filemap
159 159 """
160 160 raise NotImplementedError
161 161
162 162 def converted(self, rev, sinkrev):
163 163 '''Notify the source that a revision has been converted.'''
164 164 pass
165 165
166 166 def hasnativeorder(self):
167 167 """Return true if this source has a meaningful, native revision
168 168 order. For instance, Mercurial revisions are stored sequentially
169 169 while there is no such global ordering with Darcs.
170 170 """
171 171 return False
172 172
173 173 def hasnativeclose(self):
174 174 """Return true if this source has ability to close branch.
175 175 """
176 176 return False
177 177
178 178 def lookuprev(self, rev):
179 179 """If rev is a meaningful revision reference in source, return
180 180 the referenced identifier in the same format used by getcommit().
181 181 return None otherwise.
182 182 """
183 183 return None
184 184
185 185 def getbookmarks(self):
186 186 """Return the bookmarks as a dictionary of name: revision
187 187
188 188 Bookmark names are to be UTF-8 strings.
189 189 """
190 190 return {}
191 191
192 192 def checkrevformat(self, revstr, mapname='splicemap'):
193 193 """revstr is a string that describes a revision in the given
194 194 source control system. Return true if revstr has correct
195 195 format.
196 196 """
197 197 return True
198 198
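To make the interface above concrete, here is a small, purely hypothetical in-memory source that mirrors the methods converter_source requires (getheads, getcommit, getchanges, getfile). It is a self-contained sketch for orientation only; real backends subclass converter_source and return common.commit objects rather than the plain dicts used here:

    class memorysource(object):
        '''Two linear revisions, r0 -> r1, each touching one file.'''

        def __init__(self, ui=None, path=None, revs=None):
            self.revs = revs            # a list (or None) since this change
            self._commits = {
                'r0': {'parents': [], 'desc': 'initial',
                       'files': {'a.txt': 'one\n'}},
                'r1': {'parents': ['r0'], 'desc': 'edit a',
                       'files': {'a.txt': 'two\n'}},
            }

        def getheads(self):
            # honor -r values when given, otherwise convert from the tip
            return list(self.revs) if self.revs else ['r1']

        def getcommit(self, version):
            c = self._commits[version]
            return {'author': 'someone', 'date': '0 0', 'desc': c['desc'],
                    'parents': c['parents'], 'rev': version}

        def getchanges(self, version, full):
            files = sorted((f, version) for f in self._commits[version]['files'])
            return files, {}, set()     # (files, copies, cleanp2)

        def getfile(self, name, rev):
            data = self._commits[rev]['files'].get(name)
            return data, ''             # '' means a regular file
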
199 199 class converter_sink(object):
200 200 """Conversion sink (target) interface"""
201 201
202 202 def __init__(self, ui, path):
203 203 """Initialize conversion sink (or raise NoRepo("message")
204 204 exception if path is not a valid repository)
205 205
206 206 created is a list of paths to remove if a fatal error occurs
207 207 later"""
208 208 self.ui = ui
209 209 self.path = path
210 210 self.created = []
211 211
212 212 def revmapfile(self):
213 213 """Path to a file that will contain lines
214 214 source_rev_id sink_rev_id
215 215 mapping equivalent revision identifiers for each system."""
216 216 raise NotImplementedError
217 217
218 218 def authorfile(self):
219 219 """Path to a file that will contain lines
220 220 srcauthor=dstauthor
221 221 mapping equivalent author identifiers for each system.
222 222 return None
223 223
224 224 def putcommit(self, files, copies, parents, commit, source, revmap, full,
225 225 cleanp2):
226 226 """Create a revision with all changed files listed in 'files'
227 227 and having listed parents. 'commit' is a commit object
228 228 containing at a minimum the author, date, and message for this
229 229 changeset. 'files' is a list of (path, version) tuples,
230 230 'copies' is a dictionary mapping destinations to sources,
231 231 'source' is the source repository, and 'revmap' is a mapfile
232 232 of source revisions to converted revisions. Only getfile() and
233 233 lookuprev() should be called on 'source'. 'full' means that 'files'
234 234 is complete and all other files should be removed.
235 235 'cleanp2' is a set of the filenames that are unchanged from p2
236 236 (only in the common merge case where there are two parents).
237 237
238 238 Note that the sink repository is not told to update itself to
239 239 a particular revision (or even what that revision would be)
240 240 before it receives the file data.
241 241 """
242 242 raise NotImplementedError
243 243
244 244 def puttags(self, tags):
245 245 """Put tags into sink.
246 246
247 247 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
248 248 Return a pair (tag_revision, tag_parent_revision), or (None, None)
249 249 if nothing was changed.
250 250 """
251 251 raise NotImplementedError
252 252
253 253 def setbranch(self, branch, pbranches):
254 254 """Set the current branch name. Called before the first putcommit
255 255 on the branch.
256 256 branch: branch name for subsequent commits
257 257 pbranches: (converted parent revision, parent branch) tuples"""
258 258 pass
259 259
260 260 def setfilemapmode(self, active):
261 261 """Tell the destination that we're using a filemap
262 262
263 263 Some converter_sources (svn in particular) can claim that a file
264 264 was changed in a revision, even if there was no change. This method
265 265 tells the destination that we're using a filemap and that it should
266 266 filter empty revisions.
267 267 """
268 268 pass
269 269
270 270 def before(self):
271 271 pass
272 272
273 273 def after(self):
274 274 pass
275 275
276 276 def putbookmarks(self, bookmarks):
277 277 """Put bookmarks into sink.
278 278
279 279 bookmarks: {bookmarkname: sink_rev_id, ...}
280 280 where bookmarkname is a UTF-8 string.
281 281 """
282 282 pass
283 283
284 284 def hascommitfrommap(self, rev):
285 285 """Return False if a rev mentioned in a filemap is known to not be
286 286 present."""
287 287 raise NotImplementedError
288 288
289 289 def hascommitforsplicemap(self, rev):
290 290 """This method is for the special needs for splicemap handling and not
291 291 for general use. Returns True if the sink contains rev, aborts on some
292 292 special cases."""
293 293 raise NotImplementedError
294 294
295 295 class commandline(object):
296 296 def __init__(self, ui, command):
297 297 self.ui = ui
298 298 self.command = command
299 299
300 300 def prerun(self):
301 301 pass
302 302
303 303 def postrun(self):
304 304 pass
305 305
306 306 def _cmdline(self, cmd, *args, **kwargs):
307 307 cmdline = [self.command, cmd] + list(args)
308 308 for k, v in kwargs.iteritems():
309 309 if len(k) == 1:
310 310 cmdline.append('-' + k)
311 311 else:
312 312 cmdline.append('--' + k.replace('_', '-'))
313 313 try:
314 314 if len(k) == 1:
315 315 cmdline.append('' + v)
316 316 else:
317 317 cmdline[-1] += '=' + v
318 318 except TypeError:
319 319 pass
320 320 cmdline = [util.shellquote(arg) for arg in cmdline]
321 321 if not self.ui.debugflag:
322 322 cmdline += ['2>', os.devnull]
323 323 cmdline = ' '.join(cmdline)
324 324 return cmdline
325 325
326 326 def _run(self, cmd, *args, **kwargs):
327 327 def popen(cmdline):
328 328 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
329 329 close_fds=util.closefds,
330 330 stdout=subprocess.PIPE)
331 331 return p
332 332 return self._dorun(popen, cmd, *args, **kwargs)
333 333
334 334 def _run2(self, cmd, *args, **kwargs):
335 335 return self._dorun(util.popen2, cmd, *args, **kwargs)
336 336
337 337 def _dorun(self, openfunc, cmd, *args, **kwargs):
338 338 cmdline = self._cmdline(cmd, *args, **kwargs)
339 339 self.ui.debug('running: %s\n' % (cmdline,))
340 340 self.prerun()
341 341 try:
342 342 return openfunc(cmdline)
343 343 finally:
344 344 self.postrun()
345 345
346 346 def run(self, cmd, *args, **kwargs):
347 347 p = self._run(cmd, *args, **kwargs)
348 348 output = p.communicate()[0]
349 349 self.ui.debug(output)
350 350 return output, p.returncode
351 351
352 352 def runlines(self, cmd, *args, **kwargs):
353 353 p = self._run(cmd, *args, **kwargs)
354 354 output = p.stdout.readlines()
355 355 p.wait()
356 356 self.ui.debug(''.join(output))
357 357 return output, p.returncode
358 358
359 359 def checkexit(self, status, output=''):
360 360 if status:
361 361 if output:
362 362 self.ui.warn(_('%s error:\n') % self.command)
363 363 self.ui.warn(output)
364 364 msg = util.explainexit(status)[0]
365 365 raise util.Abort('%s %s' % (self.command, msg))
366 366
367 367 def run0(self, cmd, *args, **kwargs):
368 368 output, status = self.run(cmd, *args, **kwargs)
369 369 self.checkexit(status, output)
370 370 return output
371 371
372 372 def runlines0(self, cmd, *args, **kwargs):
373 373 output, status = self.runlines(cmd, *args, **kwargs)
374 374 self.checkexit(status, ''.join(output))
375 375 return output
376 376
377 377 @propertycache
378 378 def argmax(self):
379 379 # POSIX requires at least 4096 bytes for ARG_MAX
380 380 argmax = 4096
381 381 try:
382 382 argmax = os.sysconf("SC_ARG_MAX")
383 383 except (AttributeError, ValueError):
384 384 pass
385 385
386 386 # Windows shells impose their own limits on command line length,
387 387 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
388 388 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
389 389 # details about cmd.exe limitations.
390 390
391 391 # Since ARG_MAX is for command line _and_ environment, lower our limit
392 392 # (and keep Windows shells happy while doing this).
393 393 return argmax // 2 - 1
394 394
395 395 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
396 396 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
397 397 limit = self.argmax - cmdlen
398 398 bytes = 0
399 399 fl = []
400 400 for fn in arglist:
401 401 b = len(fn) + 3
402 402 if bytes + b < limit or len(fl) == 0:
403 403 fl.append(fn)
404 404 bytes += b
405 405 else:
406 406 yield fl
407 407 fl = [fn]
408 408 bytes = b
409 409 if fl:
410 410 yield fl
411 411
412 412 def xargs(self, arglist, cmd, *args, **kwargs):
413 413 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
414 414 self.run0(cmd, *(list(args) + l), **kwargs)
415 415
416 416 class mapfile(dict):
417 417 def __init__(self, ui, path):
418 418 super(mapfile, self).__init__()
419 419 self.ui = ui
420 420 self.path = path
421 421 self.fp = None
422 422 self.order = []
423 423 self._read()
424 424
425 425 def _read(self):
426 426 if not self.path:
427 427 return
428 428 try:
429 429 fp = open(self.path, 'r')
430 430 except IOError as err:
431 431 if err.errno != errno.ENOENT:
432 432 raise
433 433 return
434 434 for i, line in enumerate(fp):
435 435 line = line.splitlines()[0].rstrip()
436 436 if not line:
437 437 # Ignore blank lines
438 438 continue
439 439 try:
440 440 key, value = line.rsplit(' ', 1)
441 441 except ValueError:
442 442 raise util.Abort(
443 443 _('syntax error in %s(%d): key/value pair expected')
444 444 % (self.path, i + 1))
445 445 if key not in self:
446 446 self.order.append(key)
447 447 super(mapfile, self).__setitem__(key, value)
448 448 fp.close()
449 449
450 450 def __setitem__(self, key, value):
451 451 if self.fp is None:
452 452 try:
453 453 self.fp = open(self.path, 'a')
454 454 except IOError as err:
455 455 raise util.Abort(_('could not open map file %r: %s') %
456 456 (self.path, err.strerror))
457 457 self.fp.write('%s %s\n' % (key, value))
458 458 self.fp.flush()
459 459 super(mapfile, self).__setitem__(key, value)
460 460
461 461 def close(self):
462 462 if self.fp:
463 463 self.fp.close()
464 464 self.fp = None
465 465
466 466 def makedatetimestamp(t):
467 467 """Like util.makedate() but for time t instead of current time"""
468 468 delta = (datetime.datetime.utcfromtimestamp(t) -
469 469 datetime.datetime.fromtimestamp(t))
470 470 tz = delta.days * 86400 + delta.seconds
471 471 return t, tz
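The mapfile class above is what backs ``.hg/shamap`` and the branchmap: one "key value" pair per line, appended as conversion progresses, with the last value for a key winning when the file is re-read. A standalone sketch of the same round trip (temporary path and keys are illustrative):

    import os
    import tempfile

    path = os.path.join(tempfile.mkdtemp(), 'shamap')

    # mapfile.__setitem__ appends one line per assignment
    with open(path, 'a') as fp:
        fp.write('%s %s\n' % ('src-rev-1', 'dest-rev-1'))
        fp.write('%s %s\n' % ('src-rev-2', 'dest-rev-2'))

    # mapfile._read splits on the last space, so keys may contain spaces
    # but values may not; blank lines are ignored
    mapping, order = {}, []
    with open(path) as fp:
        for line in fp:
            line = line.rstrip('\n')
            if not line:
                continue
            key, value = line.rsplit(' ', 1)
            if key not in mapping:
                order.append(key)
            mapping[key] = value

    print(order, mapping)
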
@@ -1,548 +1,548 b''
1 1 # convcmd - convert extension commands definition
2 2 #
3 3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 9 from cvs import convert_cvs
10 10 from darcs import darcs_source
11 11 from git import convert_git
12 12 from hg import mercurial_source, mercurial_sink
13 13 from subversion import svn_source, svn_sink
14 14 from monotone import monotone_source
15 15 from gnuarch import gnuarch_source
16 16 from bzr import bzr_source
17 17 from p4 import p4_source
18 18 import filemap
19 19
20 20 import os, shutil, shlex
21 21 from mercurial import hg, util, encoding
22 22 from mercurial.i18n import _
23 23
24 24 orig_encoding = 'ascii'
25 25
26 26 def recode(s):
27 27 if isinstance(s, unicode):
28 28 return s.encode(orig_encoding, 'replace')
29 29 else:
30 30 return s.decode('utf-8').encode(orig_encoding, 'replace')
31 31
32 32 source_converters = [
33 33 ('cvs', convert_cvs, 'branchsort'),
34 34 ('git', convert_git, 'branchsort'),
35 35 ('svn', svn_source, 'branchsort'),
36 36 ('hg', mercurial_source, 'sourcesort'),
37 37 ('darcs', darcs_source, 'branchsort'),
38 38 ('mtn', monotone_source, 'branchsort'),
39 39 ('gnuarch', gnuarch_source, 'branchsort'),
40 40 ('bzr', bzr_source, 'branchsort'),
41 41 ('p4', p4_source, 'branchsort'),
42 42 ]
43 43
44 44 sink_converters = [
45 45 ('hg', mercurial_sink),
46 46 ('svn', svn_sink),
47 47 ]
48 48
49 def convertsource(ui, path, type, rev):
49 def convertsource(ui, path, type, revs):
50 50 exceptions = []
51 51 if type and type not in [s[0] for s in source_converters]:
52 52 raise util.Abort(_('%s: invalid source repository type') % type)
53 53 for name, source, sortmode in source_converters:
54 54 try:
55 55 if not type or name == type:
56 return source(ui, path, rev), sortmode
56 return source(ui, path, revs), sortmode
57 57 except (NoRepo, MissingTool) as inst:
58 58 exceptions.append(inst)
59 59 if not ui.quiet:
60 60 for inst in exceptions:
61 61 ui.write("%s\n" % inst)
62 62 raise util.Abort(_('%s: missing or unsupported repository') % path)
63 63
64 64 def convertsink(ui, path, type):
65 65 if type and type not in [s[0] for s in sink_converters]:
66 66 raise util.Abort(_('%s: invalid destination repository type') % type)
67 67 for name, sink in sink_converters:
68 68 try:
69 69 if not type or name == type:
70 70 return sink(ui, path)
71 71 except NoRepo as inst:
72 72 ui.note(_("convert: %s\n") % inst)
73 73 except MissingTool as inst:
74 74 raise util.Abort('%s\n' % inst)
75 75 raise util.Abort(_('%s: unknown repository type') % path)
76 76
77 77 class progresssource(object):
78 78 def __init__(self, ui, source, filecount):
79 79 self.ui = ui
80 80 self.source = source
81 81 self.filecount = filecount
82 82 self.retrieved = 0
83 83
84 84 def getfile(self, file, rev):
85 85 self.retrieved += 1
86 86 self.ui.progress(_('getting files'), self.retrieved,
87 87 item=file, total=self.filecount)
88 88 return self.source.getfile(file, rev)
89 89
90 90 def lookuprev(self, rev):
91 91 return self.source.lookuprev(rev)
92 92
93 93 def close(self):
94 94 self.ui.progress(_('getting files'), None)
95 95
96 96 class converter(object):
97 97 def __init__(self, ui, source, dest, revmapfile, opts):
98 98
99 99 self.source = source
100 100 self.dest = dest
101 101 self.ui = ui
102 102 self.opts = opts
103 103 self.commitcache = {}
104 104 self.authors = {}
105 105 self.authorfile = None
106 106
107 107 # Record converted revisions persistently: maps source revision
108 108 # ID to target revision ID (both strings). (This is how
109 109 # incremental conversions work.)
110 110 self.map = mapfile(ui, revmapfile)
111 111
112 112 # Read first the dst author map if any
113 113 authorfile = self.dest.authorfile()
114 114 if authorfile and os.path.exists(authorfile):
115 115 self.readauthormap(authorfile)
116 116 # Extend/Override with new author map if necessary
117 117 if opts.get('authormap'):
118 118 self.readauthormap(opts.get('authormap'))
119 119 self.authorfile = self.dest.authorfile()
120 120
121 121 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
122 122 self.branchmap = mapfile(ui, opts.get('branchmap'))
123 123
124 124 def parsesplicemap(self, path):
125 125 """ check and validate the splicemap format and
126 126 return a child/parents dictionary.
127 127 Format checking has two parts.
128 128 1. generic format which is the same across all source types
129 129 2. specific format checking which may be different for
130 130 different source types. This logic is implemented in
131 131 checkrevformat function in source files like
132 132 hg.py, subversion.py etc.
133 133 """
134 134
135 135 if not path:
136 136 return {}
137 137 m = {}
138 138 try:
139 139 fp = open(path, 'r')
140 140 for i, line in enumerate(fp):
141 141 line = line.splitlines()[0].rstrip()
142 142 if not line:
143 143 # Ignore blank lines
144 144 continue
145 145 # split line
146 146 lex = shlex.shlex(line, posix=True)
147 147 lex.whitespace_split = True
148 148 lex.whitespace += ','
149 149 line = list(lex)
150 150 # check number of parents
151 151 if not (2 <= len(line) <= 3):
152 152 raise util.Abort(_('syntax error in %s(%d): child parent1'
153 153 '[,parent2] expected') % (path, i + 1))
154 154 for part in line:
155 155 self.source.checkrevformat(part)
156 156 child, p1, p2 = line[0], line[1:2], line[2:]
157 157 if p1 == p2:
158 158 m[child] = p1
159 159 else:
160 160 m[child] = p1 + p2
161 161 # if file does not exist or error reading, exit
162 162 except IOError:
163 163 raise util.Abort(_('splicemap file not found or error reading %s:')
164 164 % path)
165 165 return m
166 166
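The splitting rule applied above treats commas and whitespace interchangeably, so ``child p1, p2`` and ``child p1,p2`` parse identically. A runnable sketch of just that step (identifiers are placeholders):

    import shlex

    def splitsplice(line):
        lex = shlex.shlex(line, posix=True)
        lex.whitespace_split = True
        lex.whitespace += ','
        return list(lex)

    print(splitsplice('C A, B'))   # ['C', 'A', 'B']
    print(splitsplice('C A'))      # ['C', 'A']
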
167 167
168 168 def walktree(self, heads):
169 169 '''Return a mapping that identifies the uncommitted parents of every
170 170 uncommitted changeset.'''
171 171 visit = heads
172 172 known = set()
173 173 parents = {}
174 174 numcommits = self.source.numcommits()
175 175 while visit:
176 176 n = visit.pop(0)
177 177 if n in known:
178 178 continue
179 179 if n in self.map:
180 180 m = self.map[n]
181 181 if m == SKIPREV or self.dest.hascommitfrommap(m):
182 182 continue
183 183 known.add(n)
184 184 self.ui.progress(_('scanning'), len(known), unit=_('revisions'),
185 185 total=numcommits)
186 186 commit = self.cachecommit(n)
187 187 parents[n] = []
188 188 for p in commit.parents:
189 189 parents[n].append(p)
190 190 visit.append(p)
191 191 self.ui.progress(_('scanning'), None)
192 192
193 193 return parents
194 194
195 195 def mergesplicemap(self, parents, splicemap):
196 196 """A splicemap redefines child/parent relationships. Check the
197 197 map contains valid revision identifiers and merge the new
198 198 links in the source graph.
199 199 """
200 200 for c in sorted(splicemap):
201 201 if c not in parents:
202 202 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
203 203 # Could be in source but not converted during this run
204 204 self.ui.warn(_('splice map revision %s is not being '
205 205 'converted, ignoring\n') % c)
206 206 continue
207 207 pc = []
208 208 for p in splicemap[c]:
209 209 # We do not have to wait for nodes already in dest.
210 210 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
211 211 continue
212 212 # Parent is not in dest and not being converted, not good
213 213 if p not in parents:
214 214 raise util.Abort(_('unknown splice map parent: %s') % p)
215 215 pc.append(p)
216 216 parents[c] = pc
217 217
218 218 def toposort(self, parents, sortmode):
219 219 '''Return an ordering such that every uncommitted changeset is
220 220 preceded by all its uncommitted ancestors.'''
221 221
222 222 def mapchildren(parents):
223 223 """Return a (children, roots) tuple where 'children' maps parent
224 224 revision identifiers to their children, and 'roots' is the list of
225 225 revisions without parents. 'parents' must be a mapping of revision
226 226 identifiers to their parents.
227 227 """
228 228 visit = sorted(parents)
229 229 seen = set()
230 230 children = {}
231 231 roots = []
232 232
233 233 while visit:
234 234 n = visit.pop(0)
235 235 if n in seen:
236 236 continue
237 237 seen.add(n)
238 238 # Ensure that nodes without parents are present in the
239 239 # 'children' mapping.
240 240 children.setdefault(n, [])
241 241 hasparent = False
242 242 for p in parents[n]:
243 243 if p not in self.map:
244 244 visit.append(p)
245 245 hasparent = True
246 246 children.setdefault(p, []).append(n)
247 247 if not hasparent:
248 248 roots.append(n)
249 249
250 250 return children, roots
251 251
252 252 # Sort functions are supposed to take a list of revisions which
253 253 # can be converted immediately and pick one
254 254
255 255 def makebranchsorter():
256 256 """If the previously converted revision has a child in the
257 257 eligible revisions list, pick it. Return the list head
258 258 otherwise. Branch sort attempts to minimize branch
259 259 switching, which is harmful for Mercurial backend
260 260 compression.
261 261 """
262 262 prev = [None]
263 263 def picknext(nodes):
264 264 next = nodes[0]
265 265 for n in nodes:
266 266 if prev[0] in parents[n]:
267 267 next = n
268 268 break
269 269 prev[0] = next
270 270 return next
271 271 return picknext
272 272
273 273 def makesourcesorter():
274 274 """Source specific sort."""
275 275 keyfn = lambda n: self.commitcache[n].sortkey
276 276 def picknext(nodes):
277 277 return sorted(nodes, key=keyfn)[0]
278 278 return picknext
279 279
280 280 def makeclosesorter():
281 281 """Close order sort."""
282 282 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
283 283 self.commitcache[n].sortkey)
284 284 def picknext(nodes):
285 285 return sorted(nodes, key=keyfn)[0]
286 286 return picknext
287 287
288 288 def makedatesorter():
289 289 """Sort revisions by date."""
290 290 dates = {}
291 291 def getdate(n):
292 292 if n not in dates:
293 293 dates[n] = util.parsedate(self.commitcache[n].date)
294 294 return dates[n]
295 295
296 296 def picknext(nodes):
297 297 return min([(getdate(n), n) for n in nodes])[1]
298 298
299 299 return picknext
300 300
301 301 if sortmode == 'branchsort':
302 302 picknext = makebranchsorter()
303 303 elif sortmode == 'datesort':
304 304 picknext = makedatesorter()
305 305 elif sortmode == 'sourcesort':
306 306 picknext = makesourcesorter()
307 307 elif sortmode == 'closesort':
308 308 picknext = makeclosesorter()
309 309 else:
310 310 raise util.Abort(_('unknown sort mode: %s') % sortmode)
311 311
312 312 children, actives = mapchildren(parents)
313 313
314 314 s = []
315 315 pendings = {}
316 316 while actives:
317 317 n = picknext(actives)
318 318 actives.remove(n)
319 319 s.append(n)
320 320
321 321 # Update dependents list
322 322 for c in children.get(n, []):
323 323 if c not in pendings:
324 324 pendings[c] = [p for p in parents[c] if p not in self.map]
325 325 try:
326 326 pendings[c].remove(n)
327 327 except ValueError:
328 328 raise util.Abort(_('cycle detected between %s and %s')
329 329 % (recode(c), recode(n)))
330 330 if not pendings[c]:
331 331 # Parents are converted, node is eligible
332 332 actives.insert(0, c)
333 333 pendings[c] = None
334 334
335 335 if len(s) != len(parents):
336 336 raise util.Abort(_("not all revisions were sorted"))
337 337
338 338 return s
339 339
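A self-contained toy run of the same scheduling loop with a branchsort-style picker; the five-revision graph and its children/roots tables are invented for illustration:

# two branches hanging off 'a': a -> b1 -> b2 and a -> c1 -> c2
parents  = {'a': [], 'b1': ['a'], 'b2': ['b1'], 'c1': ['a'], 'c2': ['c1']}
children = {'a': ['b1', 'c1'], 'b1': ['b2'], 'b2': [], 'c1': ['c2'], 'c2': []}
roots = ['a']

prev = [None]
def picknext(nodes):
    # branchsort: prefer a child of the previously emitted revision
    for n in nodes:
        if prev[0] in parents[n]:
            prev[0] = n
            return n
    prev[0] = nodes[0]
    return nodes[0]

order, pendings, actives = [], {}, list(roots)
while actives:
    n = picknext(actives)
    actives.remove(n)
    order.append(n)
    for c in children[n]:
        pendings.setdefault(c, list(parents[c])).remove(n)
        if not pendings[c]:
            actives.insert(0, c)

print(order)  # ['a', 'c1', 'c2', 'b1', 'b2'] -- each branch comes out contiguously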
340 340 def writeauthormap(self):
341 341 authorfile = self.authorfile
342 342 if authorfile:
343 343 self.ui.status(_('writing author map file %s\n') % authorfile)
344 344 ofile = open(authorfile, 'w+')
345 345 for author in self.authors:
346 346 ofile.write("%s=%s\n" % (author, self.authors[author]))
347 347 ofile.close()
348 348
349 349 def readauthormap(self, authorfile):
350 350 afile = open(authorfile, 'r')
351 351 for line in afile:
352 352
353 353 line = line.strip()
354 354 if not line or line.startswith('#'):
355 355 continue
356 356
357 357 try:
358 358 srcauthor, dstauthor = line.split('=', 1)
359 359 except ValueError:
360 360 msg = _('ignoring bad line in author map file %s: %s\n')
361 361 self.ui.warn(msg % (authorfile, line.rstrip()))
362 362 continue
363 363
364 364 srcauthor = srcauthor.strip()
365 365 dstauthor = dstauthor.strip()
366 366 if self.authors.get(srcauthor) in (None, dstauthor):
367 367 msg = _('mapping author %s to %s\n')
368 368 self.ui.debug(msg % (srcauthor, dstauthor))
369 369 self.authors[srcauthor] = dstauthor
370 370 continue
371 371
372 372 m = _('overriding mapping for author %s, was %s, will be %s\n')
373 373 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
374 374
375 375 afile.close()
376 376
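A runnable miniature of the author map format readauthormap() expects; the logins and identities below are invented:

sample = """
# one 'source=destination' pair per line; blank lines and '#' comments skipped
jdoe=John Doe <jdoe@example.com>
build = Build Bot <build@example.com>
"""
authors = {}
for line in sample.splitlines():
    line = line.strip()
    if not line or line.startswith('#'):
        continue
    srcauthor, dstauthor = line.split('=', 1)
    authors[srcauthor.strip()] = dstauthor.strip()
print(authors)
# {'jdoe': 'John Doe <jdoe@example.com>', 'build': 'Build Bot <build@example.com>'}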
377 377 def cachecommit(self, rev):
378 378 commit = self.source.getcommit(rev)
379 379 commit.author = self.authors.get(commit.author, commit.author)
380 380 # If commit.branch is None, this commit is coming from the source
381 381 # repository's default branch and destined for the default branch in the
382 382 # destination repository. For such commits, passing a literal "None"
383 383 # string to branchmap.get() below allows the user to map "None" to an
384 384 # alternate default branch in the destination repository.
385 385 commit.branch = self.branchmap.get(str(commit.branch), commit.branch)
386 386 self.commitcache[rev] = commit
387 387 return commit
388 388
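The 'None' lookup described in the comment above, replayed standalone; the branch names are made up:

branchmap = {'None': 'converted-default', 'feature-x': 'default'}

for srcbranch in (None, 'feature-x', 'release-1.0'):
    # str(None) == 'None', so unnamed source branches can be remapped too
    print(srcbranch, '->', branchmap.get(str(srcbranch), srcbranch))
# None -> converted-default
# feature-x -> default
# release-1.0 -> release-1.0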
389 389 def copy(self, rev):
390 390 commit = self.commitcache[rev]
391 391 full = self.opts.get('full')
392 392 changes = self.source.getchanges(rev, full)
393 393 if isinstance(changes, basestring):
394 394 if changes == SKIPREV:
395 395 dest = SKIPREV
396 396 else:
397 397 dest = self.map[changes]
398 398 self.map[rev] = dest
399 399 return
400 400 files, copies, cleanp2 = changes
401 401 pbranches = []
402 402 if commit.parents:
403 403 for prev in commit.parents:
404 404 if prev not in self.commitcache:
405 405 self.cachecommit(prev)
406 406 pbranches.append((self.map[prev],
407 407 self.commitcache[prev].branch))
408 408 self.dest.setbranch(commit.branch, pbranches)
409 409 try:
410 410 parents = self.splicemap[rev]
411 411 self.ui.status(_('spliced in %s as parents of %s\n') %
412 412 (parents, rev))
413 413 parents = [self.map.get(p, p) for p in parents]
414 414 except KeyError:
415 415 parents = [b[0] for b in pbranches]
416 416 if len(pbranches) != 2:
417 417 cleanp2 = set()
418 418 if len(parents) < 3:
419 419 source = progresssource(self.ui, self.source, len(files))
420 420 else:
421 421 # For an octopus merge, we end up traversing the list of
422 422 # changed files N-1 times. This tweak to the number of
423 423 # files makes it so the progress bar doesn't overflow
424 424 # itself.
425 425 source = progresssource(self.ui, self.source,
426 426 len(files) * (len(parents) - 1))
427 427 newnode = self.dest.putcommit(files, copies, parents, commit,
428 428 source, self.map, full, cleanp2)
429 429 source.close()
430 430 self.source.converted(rev, newnode)
431 431 self.map[rev] = newnode
432 432
433 433 def convert(self, sortmode):
434 434 try:
435 435 self.source.before()
436 436 self.dest.before()
437 437 self.source.setrevmap(self.map)
438 438 self.ui.status(_("scanning source...\n"))
439 439 heads = self.source.getheads()
440 440 parents = self.walktree(heads)
441 441 self.mergesplicemap(parents, self.splicemap)
442 442 self.ui.status(_("sorting...\n"))
443 443 t = self.toposort(parents, sortmode)
444 444 num = len(t)
445 445 c = None
446 446
447 447 self.ui.status(_("converting...\n"))
448 448 for i, c in enumerate(t):
449 449 num -= 1
450 450 desc = self.commitcache[c].desc
451 451 if "\n" in desc:
452 452 desc = desc.splitlines()[0]
453 453 # convert log message to local encoding without using
454 454 # tolocal() because the encoding.encoding that convert()
455 455 # uses is 'utf-8'
456 456 self.ui.status("%d %s\n" % (num, recode(desc)))
457 457 self.ui.note(_("source: %s\n") % recode(c))
458 458 self.ui.progress(_('converting'), i, unit=_('revisions'),
459 459 total=len(t))
460 460 self.copy(c)
461 461 self.ui.progress(_('converting'), None)
462 462
463 463 if not self.ui.configbool('convert', 'skiptags'):
464 464 tags = self.source.gettags()
465 465 ctags = {}
466 466 for k in tags:
467 467 v = tags[k]
468 468 if self.map.get(v, SKIPREV) != SKIPREV:
469 469 ctags[k] = self.map[v]
470 470
471 471 if c and ctags:
472 472 nrev, tagsparent = self.dest.puttags(ctags)
473 473 if nrev and tagsparent:
474 474 # write another hash correspondence to override the
475 475 # previous one so we don't end up with extra tag heads
476 476 tagsparents = [e for e in self.map.iteritems()
477 477 if e[1] == tagsparent]
478 478 if tagsparents:
479 479 self.map[tagsparents[0][0]] = nrev
480 480
481 481 bookmarks = self.source.getbookmarks()
482 482 cbookmarks = {}
483 483 for k in bookmarks:
484 484 v = bookmarks[k]
485 485 if self.map.get(v, SKIPREV) != SKIPREV:
486 486 cbookmarks[k] = self.map[v]
487 487
488 488 if c and cbookmarks:
489 489 self.dest.putbookmarks(cbookmarks)
490 490
491 491 self.writeauthormap()
492 492 finally:
493 493 self.cleanup()
494 494
495 495 def cleanup(self):
496 496 try:
497 497 self.dest.after()
498 498 finally:
499 499 self.source.after()
500 500 self.map.close()
501 501
502 502 def convert(ui, src, dest=None, revmapfile=None, **opts):
503 503 global orig_encoding
504 504 orig_encoding = encoding.encoding
505 505 encoding.encoding = 'UTF-8'
506 506
507 507 # support --authors as an alias for --authormap
508 508 if not opts.get('authormap'):
509 509 opts['authormap'] = opts.get('authors')
510 510
511 511 if not dest:
512 512 dest = hg.defaultdest(src) + "-hg"
513 513 ui.status(_("assuming destination %s\n") % dest)
514 514
515 515 destc = convertsink(ui, dest, opts.get('dest_type'))
516 516
517 517 try:
518 518 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
519 519 opts.get('rev'))
520 520 except Exception:
521 521 for path in destc.created:
522 522 shutil.rmtree(path, True)
523 523 raise
524 524
525 525 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
526 526 sortmode = [m for m in sortmodes if opts.get(m)]
527 527 if len(sortmode) > 1:
528 528 raise util.Abort(_('more than one sort mode specified'))
529 529 if sortmode:
530 530 sortmode = sortmode[0]
531 531 else:
532 532 sortmode = defaultsort
533 533
534 534 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
535 535 raise util.Abort(_('--sourcesort is not supported by this data source'))
536 536 if sortmode == 'closesort' and not srcc.hasnativeclose():
537 537 raise util.Abort(_('--closesort is not supported by this data source'))
538 538
539 539 fmap = opts.get('filemap')
540 540 if fmap:
541 541 srcc = filemap.filemap_source(ui, srcc, fmap)
542 542 destc.setfilemapmode(True)
543 543
544 544 if not revmapfile:
545 545 revmapfile = destc.revmapfile()
546 546
547 547 c = converter(ui, srcc, destc, revmapfile, opts)
548 548 c.convert(sortmode)
@@ -1,277 +1,280 b''
1 1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os, re, socket, errno
9 9 from cStringIO import StringIO
10 10 from mercurial import encoding, util
11 11 from mercurial.i18n import _
12 12
13 13 from common import NoRepo, commit, converter_source, checktool
14 14 from common import makedatetimestamp
15 15 import cvsps
16 16
17 17 class convert_cvs(converter_source):
18 def __init__(self, ui, path, rev=None):
19 super(convert_cvs, self).__init__(ui, path, rev=rev)
18 def __init__(self, ui, path, revs=None):
19 super(convert_cvs, self).__init__(ui, path, revs=revs)
20 20
21 21 cvs = os.path.join(path, "CVS")
22 22 if not os.path.exists(cvs):
23 23 raise NoRepo(_("%s does not look like a CVS checkout") % path)
24 24
25 25 checktool('cvs')
26 26
27 27 self.changeset = None
28 28 self.files = {}
29 29 self.tags = {}
30 30 self.lastbranch = {}
31 31 self.socket = None
32 32 self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
33 33 self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
34 34 self.encoding = encoding.encoding
35 35
36 36 self._connect()
37 37
38 38 def _parse(self):
39 39 if self.changeset is not None:
40 40 return
41 41 self.changeset = {}
42 42
43 43 maxrev = 0
44 if self.rev:
44 if self.revs:
45 if len(self.revs) > 1:
46 raise util.Abort(_('cvs source does not support specifying '
47 'multiple revs'))
45 48 # TODO: handle tags
46 49 try:
47 50 # patchset number?
48 maxrev = int(self.rev)
51 maxrev = int(self.revs[0])
49 52 except ValueError:
50 53 raise util.Abort(_('revision %s is not a patchset number')
51 % self.rev)
54 % self.revs[0])
52 55
53 56 d = os.getcwd()
54 57 try:
55 58 os.chdir(self.path)
56 59 id = None
57 60
58 61 cache = 'update'
59 62 if not self.ui.configbool('convert', 'cvsps.cache', True):
60 63 cache = None
61 64 db = cvsps.createlog(self.ui, cache=cache)
62 65 db = cvsps.createchangeset(self.ui, db,
63 66 fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
64 67 mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
65 68 mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
66 69
67 70 for cs in db:
68 71 if maxrev and cs.id > maxrev:
69 72 break
70 73 id = str(cs.id)
71 74 cs.author = self.recode(cs.author)
72 75 self.lastbranch[cs.branch] = id
73 76 cs.comment = self.recode(cs.comment)
74 77 if self.ui.configbool('convert', 'localtimezone'):
75 78 cs.date = makedatetimestamp(cs.date[0])
76 79 date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
77 80 self.tags.update(dict.fromkeys(cs.tags, id))
78 81
79 82 files = {}
80 83 for f in cs.entries:
81 84 files[f.file] = "%s%s" % ('.'.join([str(x)
82 85 for x in f.revision]),
83 86 ['', '(DEAD)'][f.dead])
84 87
85 88 # add current commit to set
86 89 c = commit(author=cs.author, date=date,
87 90 parents=[str(p.id) for p in cs.parents],
88 91 desc=cs.comment, branch=cs.branch or '')
89 92 self.changeset[id] = c
90 93 self.files[id] = files
91 94
92 95 self.heads = self.lastbranch.values()
93 96 finally:
94 97 os.chdir(d)
95 98
96 99 def _connect(self):
97 100 root = self.cvsroot
98 101 conntype = None
99 102 user, host = None, None
100 103 cmd = ['cvs', 'server']
101 104
102 105 self.ui.status(_("connecting to %s\n") % root)
103 106
104 107 if root.startswith(":pserver:"):
105 108 root = root[9:]
106 109 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
107 110 root)
108 111 if m:
109 112 conntype = "pserver"
110 113 user, passw, serv, port, root = m.groups()
111 114 if not user:
112 115 user = "anonymous"
113 116 if not port:
114 117 port = 2401
115 118 else:
116 119 port = int(port)
117 120 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
118 121 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
119 122
120 123 if not passw:
121 124 passw = "A"
122 125 cvspass = os.path.expanduser("~/.cvspass")
123 126 try:
124 127 pf = open(cvspass)
125 128 for line in pf.read().splitlines():
126 129 part1, part2 = line.split(' ', 1)
127 130 # /1 :pserver:user@example.com:2401/cvsroot/foo
128 131 # Ah<Z
129 132 if part1 == '/1':
130 133 part1, part2 = part2.split(' ', 1)
131 134 format = format1
132 135 # :pserver:user@example.com:/cvsroot/foo Ah<Z
133 136 else:
134 137 format = format0
135 138 if part1 == format:
136 139 passw = part2
137 140 break
138 141 pf.close()
139 142 except IOError as inst:
140 143 if inst.errno != errno.ENOENT:
141 144 if not getattr(inst, 'filename', None):
142 145 inst.filename = cvspass
143 146 raise
144 147
145 148 sck = socket.socket()
146 149 sck.connect((serv, port))
147 150 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
148 151 "END AUTH REQUEST", ""]))
149 152 if sck.recv(128) != "I LOVE YOU\n":
150 153 raise util.Abort(_("CVS pserver authentication failed"))
151 154
152 155 self.writep = self.readp = sck.makefile('r+')
153 156
154 157 if not conntype and root.startswith(":local:"):
155 158 conntype = "local"
156 159 root = root[7:]
157 160
158 161 if not conntype:
159 162 # :ext:user@host/home/user/path/to/cvsroot
160 163 if root.startswith(":ext:"):
161 164 root = root[5:]
162 165 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
163 166 # Do not take a Windows path "c:\foo\bar" for a connection string
164 167 if os.path.isdir(root) or not m:
165 168 conntype = "local"
166 169 else:
167 170 conntype = "rsh"
168 171 user, host, root = m.group(1), m.group(2), m.group(3)
169 172
170 173 if conntype != "pserver":
171 174 if conntype == "rsh":
172 175 rsh = os.environ.get("CVS_RSH") or "ssh"
173 176 if user:
174 177 cmd = [rsh, '-l', user, host] + cmd
175 178 else:
176 179 cmd = [rsh, host] + cmd
177 180
178 181 # popen2 does not support argument lists under Windows
179 182 cmd = [util.shellquote(arg) for arg in cmd]
180 183 cmd = util.quotecommand(' '.join(cmd))
181 184 self.writep, self.readp = util.popen2(cmd)
182 185
183 186 self.realroot = root
184 187
185 188 self.writep.write("Root %s\n" % root)
186 189 self.writep.write("Valid-responses ok error Valid-requests Mode"
187 190 " M Mbinary E Checked-in Created Updated"
188 191 " Merged Removed\n")
189 192 self.writep.write("valid-requests\n")
190 193 self.writep.flush()
191 194 r = self.readp.readline()
192 195 if not r.startswith("Valid-requests"):
193 196 raise util.Abort(_('unexpected response from CVS server '
194 197 '(expected "Valid-requests", but got %r)')
195 198 % r)
196 199 if "UseUnchanged" in r:
197 200 self.writep.write("UseUnchanged\n")
198 201 self.writep.flush()
199 202 r = self.readp.readline()
200 203
201 204 def getheads(self):
202 205 self._parse()
203 206 return self.heads
204 207
205 208 def getfile(self, name, rev):
206 209
207 210 def chunkedread(fp, count):
208 211 # file-objects returned by socket.makefile() do not handle
209 212 # large read() requests very well.
210 213 chunksize = 65536
211 214 output = StringIO()
212 215 while count > 0:
213 216 data = fp.read(min(count, chunksize))
214 217 if not data:
215 218 raise util.Abort(_("%d bytes missing from remote file")
216 219 % count)
217 220 count -= len(data)
218 221 output.write(data)
219 222 return output.getvalue()
220 223
221 224 self._parse()
222 225 if rev.endswith("(DEAD)"):
223 226 return None, None
224 227
225 228 args = ("-N -P -kk -r %s --" % rev).split()
226 229 args.append(self.cvsrepo + '/' + name)
227 230 for x in args:
228 231 self.writep.write("Argument %s\n" % x)
229 232 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
230 233 self.writep.flush()
231 234
232 235 data = ""
233 236 mode = None
234 237 while True:
235 238 line = self.readp.readline()
236 239 if line.startswith("Created ") or line.startswith("Updated "):
237 240 self.readp.readline() # path
238 241 self.readp.readline() # entries
239 242 mode = self.readp.readline()[:-1]
240 243 count = int(self.readp.readline()[:-1])
241 244 data = chunkedread(self.readp, count)
242 245 elif line.startswith(" "):
243 246 data += line[1:]
244 247 elif line.startswith("M "):
245 248 pass
246 249 elif line.startswith("Mbinary "):
247 250 count = int(self.readp.readline()[:-1])
248 251 data = chunkedread(self.readp, count)
249 252 else:
250 253 if line == "ok\n":
251 254 if mode is None:
252 255 raise util.Abort(_('malformed response from CVS'))
253 256 return (data, "x" in mode and "x" or "")
254 257 elif line.startswith("E "):
255 258 self.ui.warn(_("cvs server: %s\n") % line[2:])
256 259 elif line.startswith("Remove"):
257 260 self.readp.readline()
258 261 else:
259 262 raise util.Abort(_("unknown CVS response: %s") % line)
260 263
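The chunked-read pattern from getfile() above, extracted into a runnable sketch with io.BytesIO standing in for the CVS server's socket file object:

import io

def chunkedread(fp, count, chunksize=65536):
    # read exactly 'count' bytes in bounded chunks; one huge read() on a
    # socket.makefile() object is what the original code works around
    output = io.BytesIO()
    while count > 0:
        data = fp.read(min(count, chunksize))
        if not data:
            raise IOError('%d bytes missing from remote file' % count)
        count -= len(data)
        output.write(data)
    return output.getvalue()

blob = b'x' * 200000
print(len(chunkedread(io.BytesIO(blob), len(blob), chunksize=4096)))  # 200000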
261 264 def getchanges(self, rev, full):
262 265 if full:
263 266 raise util.Abort(_("convert from cvs does not support --full"))
264 267 self._parse()
265 268 return sorted(self.files[rev].iteritems()), {}, set()
266 269
267 270 def getcommit(self, rev):
268 271 self._parse()
269 272 return self.changeset[rev]
270 273
271 274 def gettags(self):
272 275 self._parse()
273 276 return self.tags
274 277
275 278 def getchangedfiles(self, rev, i):
276 279 self._parse()
277 280 return sorted(self.files[rev])
@@ -1,208 +1,208 b''
1 1 # darcs.py - darcs support for the convert extension
2 2 #
3 3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from common import NoRepo, checktool, commandline, commit, converter_source
9 9 from mercurial.i18n import _
10 10 from mercurial import util
11 11 import os, shutil, tempfile, re, errno
12 12
13 13 # The naming drift of ElementTree is fun!
14 14
15 15 try:
16 16 from xml.etree.cElementTree import ElementTree, XMLParser
17 17 except ImportError:
18 18 try:
19 19 from xml.etree.ElementTree import ElementTree, XMLParser
20 20 except ImportError:
21 21 try:
22 22 from elementtree.cElementTree import ElementTree, XMLParser
23 23 except ImportError:
24 24 try:
25 25 from elementtree.ElementTree import ElementTree, XMLParser
26 26 except ImportError:
27 27 pass
28 28
29 29 class darcs_source(converter_source, commandline):
30 def __init__(self, ui, path, rev=None):
31 converter_source.__init__(self, ui, path, rev=rev)
30 def __init__(self, ui, path, revs=None):
31 converter_source.__init__(self, ui, path, revs=revs)
32 32 commandline.__init__(self, ui, 'darcs')
33 33
34 34 # check for _darcs, ElementTree so that we can easily skip
35 35 # test-convert-darcs if ElementTree is not around
36 36 if not os.path.exists(os.path.join(path, '_darcs')):
37 37 raise NoRepo(_("%s does not look like a darcs repository") % path)
38 38
39 39 checktool('darcs')
40 40 version = self.run0('--version').splitlines()[0].strip()
41 41 if version < '2.1':
42 42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
43 43 version)
44 44
45 45 if "ElementTree" not in globals():
46 46 raise util.Abort(_("Python ElementTree module is not available"))
47 47
48 48 self.path = os.path.realpath(path)
49 49
50 50 self.lastrev = None
51 51 self.changes = {}
52 52 self.parents = {}
53 53 self.tags = {}
54 54
55 55 # Check darcs repository format
56 56 format = self.format()
57 57 if format:
58 58 if format in ('darcs-1.0', 'hashed'):
59 59 raise NoRepo(_("%s repository format is unsupported, "
60 60 "please upgrade") % format)
61 61 else:
62 62 self.ui.warn(_('failed to detect repository format!'))
63 63
64 64 def before(self):
65 65 self.tmppath = tempfile.mkdtemp(
66 66 prefix='convert-' + os.path.basename(self.path) + '-')
67 67 output, status = self.run('init', repodir=self.tmppath)
68 68 self.checkexit(status)
69 69
70 70 tree = self.xml('changes', xml_output=True, summary=True,
71 71 repodir=self.path)
72 72 tagname = None
73 73 child = None
74 74 for elt in tree.findall('patch'):
75 75 node = elt.get('hash')
76 76 name = elt.findtext('name', '')
77 77 if name.startswith('TAG '):
78 78 tagname = name[4:].strip()
79 79 elif tagname is not None:
80 80 self.tags[tagname] = node
81 81 tagname = None
82 82 self.changes[node] = elt
83 83 self.parents[child] = [node]
84 84 child = node
85 85 self.parents[child] = []
86 86
87 87 def after(self):
88 88 self.ui.debug('cleaning up %s\n' % self.tmppath)
89 89 shutil.rmtree(self.tmppath, ignore_errors=True)
90 90
91 91 def recode(self, s, encoding=None):
92 92 if isinstance(s, unicode):
93 93 # XMLParser returns unicode objects for anything it can't
94 94 # encode into ASCII. We convert them back to str to get
95 95 # recode's normal conversion behavior.
96 96 s = s.encode('latin-1')
97 97 return super(darcs_source, self).recode(s, encoding)
98 98
99 99 def xml(self, cmd, **kwargs):
100 100 # NOTE: darcs is currently encoding agnostic and will print
101 101 # patch metadata byte-for-byte, even in the XML changelog.
102 102 etree = ElementTree()
103 103 # While we are decoding the XML as latin-1 to be as liberal as
104 104 # possible, etree will still raise an exception if any
105 105 # non-printable characters are in the XML changelog.
106 106 parser = XMLParser(encoding='latin-1')
107 107 p = self._run(cmd, **kwargs)
108 108 etree.parse(p.stdout, parser=parser)
109 109 p.wait()
110 110 self.checkexit(p.returncode)
111 111 return etree.getroot()
112 112
113 113 def format(self):
114 114 output, status = self.run('show', 'repo', no_files=True,
115 115 repodir=self.path)
116 116 self.checkexit(status)
117 117 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
118 118 if not m:
119 119 return None
120 120 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
121 121
122 122 def manifest(self):
123 123 man = []
124 124 output, status = self.run('show', 'files', no_directories=True,
125 125 repodir=self.tmppath)
126 126 self.checkexit(status)
127 127 for line in output.split('\n'):
128 128 path = line[2:]
129 129 if path:
130 130 man.append(path)
131 131 return man
132 132
133 133 def getheads(self):
134 134 return self.parents[None]
135 135
136 136 def getcommit(self, rev):
137 137 elt = self.changes[rev]
138 138 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
139 139 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
140 140 # etree can return unicode objects for name, comment, and author,
141 141 # so recode() is used to ensure str objects are emitted.
142 142 return commit(author=self.recode(elt.get('author')),
143 143 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
144 144 desc=self.recode(desc).strip(),
145 145 parents=self.parents[rev])
146 146
147 147 def pull(self, rev):
148 148 output, status = self.run('pull', self.path, all=True,
149 149 match='hash %s' % rev,
150 150 no_test=True, no_posthook=True,
151 151 external_merge='/bin/false',
152 152 repodir=self.tmppath)
153 153 if status:
154 154 if output.find('We have conflicts in') == -1:
155 155 self.checkexit(status, output)
156 156 output, status = self.run('revert', all=True, repodir=self.tmppath)
157 157 self.checkexit(status, output)
158 158
159 159 def getchanges(self, rev, full):
160 160 if full:
161 161 raise util.Abort(_("convert from darcs does not support --full"))
162 162 copies = {}
163 163 changes = []
164 164 man = None
165 165 for elt in self.changes[rev].find('summary').getchildren():
166 166 if elt.tag in ('add_directory', 'remove_directory'):
167 167 continue
168 168 if elt.tag == 'move':
169 169 if man is None:
170 170 man = self.manifest()
171 171 source, dest = elt.get('from'), elt.get('to')
172 172 if source in man:
173 173 # File move
174 174 changes.append((source, rev))
175 175 changes.append((dest, rev))
176 176 copies[dest] = source
177 177 else:
178 178 # Directory move, deduce file moves from manifest
179 179 source = source + '/'
180 180 for f in man:
181 181 if not f.startswith(source):
182 182 continue
183 183 fdest = dest + '/' + f[len(source):]
184 184 changes.append((f, rev))
185 185 changes.append((fdest, rev))
186 186 copies[fdest] = f
187 187 else:
188 188 changes.append((elt.text.strip(), rev))
189 189 self.pull(rev)
190 190 self.lastrev = rev
191 191 return sorted(changes), copies, set()
192 192
193 193 def getfile(self, name, rev):
194 194 if rev != self.lastrev:
195 195 raise util.Abort(_('internal calling inconsistency'))
196 196 path = os.path.join(self.tmppath, name)
197 197 try:
198 198 data = util.readfile(path)
199 199 mode = os.lstat(path).st_mode
200 200 except IOError as inst:
201 201 if inst.errno == errno.ENOENT:
202 202 return None, None
203 203 raise
204 204 mode = (mode & 0o111) and 'x' or ''
205 205 return data, mode
206 206
207 207 def gettags(self):
208 208 return self.tags
@@ -1,396 +1,401 b''
1 1 # git.py - git support for the convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 import os
9 9 import subprocess
10 10 from mercurial import util, config, error
11 11 from mercurial.node import hex, nullid
12 12 from mercurial.i18n import _
13 13
14 14 from common import NoRepo, commit, converter_source, checktool
15 15
16 16 class submodule(object):
17 17 def __init__(self, path, node, url):
18 18 self.path = path
19 19 self.node = node
20 20 self.url = url
21 21
22 22 def hgsub(self):
23 23 return "%s = [git]%s" % (self.path, self.url)
24 24
25 25 def hgsubstate(self):
26 26 return "%s %s" % (self.node, self.path)
27 27
28 28 class convert_git(converter_source):
29 29 # Windows does not support the GIT_DIR= construct, while other systems
30 30 # cannot remove an environment variable. Just assume no system has
31 31 # both issues.
32 32 if util.safehasattr(os, 'unsetenv'):
33 33 def gitopen(self, s, err=None):
34 34 prevgitdir = os.environ.get('GIT_DIR')
35 35 os.environ['GIT_DIR'] = self.path
36 36 try:
37 37 if err == subprocess.PIPE:
38 38 (stdin, stdout, stderr) = util.popen3(s)
39 39 return stdout
40 40 elif err == subprocess.STDOUT:
41 41 return self.popen_with_stderr(s)
42 42 else:
43 43 return util.popen(s, 'rb')
44 44 finally:
45 45 if prevgitdir is None:
46 46 del os.environ['GIT_DIR']
47 47 else:
48 48 os.environ['GIT_DIR'] = prevgitdir
49 49
50 50 def gitpipe(self, s):
51 51 prevgitdir = os.environ.get('GIT_DIR')
52 52 os.environ['GIT_DIR'] = self.path
53 53 try:
54 54 return util.popen3(s)
55 55 finally:
56 56 if prevgitdir is None:
57 57 del os.environ['GIT_DIR']
58 58 else:
59 59 os.environ['GIT_DIR'] = prevgitdir
60 60
61 61 else:
62 62 def gitopen(self, s, err=None):
63 63 if err == subprocess.PIPE:
64 64 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
65 65 return so
66 66 elif err == subprocess.STDOUT:
67 67 return self.popen_with_stderr(s)
68 68 else:
69 69 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
70 70
71 71 def gitpipe(self, s):
72 72 return util.popen3('GIT_DIR=%s %s' % (self.path, s))
73 73
74 74 def popen_with_stderr(self, s):
75 75 p = subprocess.Popen(s, shell=True, bufsize=-1,
76 76 close_fds=util.closefds,
77 77 stdin=subprocess.PIPE,
78 78 stdout=subprocess.PIPE,
79 79 stderr=subprocess.STDOUT,
80 80 universal_newlines=False,
81 81 env=None)
82 82 return p.stdout
83 83
84 84 def gitread(self, s):
85 85 fh = self.gitopen(s)
86 86 data = fh.read()
87 87 return data, fh.close()
88 88
89 def __init__(self, ui, path, rev=None):
90 super(convert_git, self).__init__(ui, path, rev=rev)
89 def __init__(self, ui, path, revs=None):
90 super(convert_git, self).__init__(ui, path, revs=revs)
91
92 if revs and len(revs) > 1:
93 raise util.Abort(_("git source does not support specifying "
94 "multiple revs"))
91 95
92 96 if os.path.isdir(path + "/.git"):
93 97 path += "/.git"
94 98 if not os.path.exists(path + "/objects"):
95 99 raise NoRepo(_("%s does not look like a Git repository") % path)
96 100
97 101 # The default value (50) is based on the default for 'git diff'.
98 102 similarity = ui.configint('convert', 'git.similarity', default=50)
99 103 if similarity < 0 or similarity > 100:
100 104 raise util.Abort(_('similarity must be between 0 and 100'))
101 105 if similarity > 0:
102 106 self.simopt = '-C%d%%' % similarity
103 107 findcopiesharder = ui.configbool('convert', 'git.findcopiesharder',
104 108 False)
105 109 if findcopiesharder:
106 110 self.simopt += ' --find-copies-harder'
107 111 else:
108 112 self.simopt = ''
109 113
110 114 checktool('git', 'git')
111 115
112 116 self.path = path
113 117 self.submodules = []
114 118
115 119 self.catfilepipe = self.gitpipe('git cat-file --batch')
116 120
117 121 def after(self):
118 122 for f in self.catfilepipe:
119 123 f.close()
120 124
121 125 def getheads(self):
122 if not self.rev:
126 if not self.revs:
123 127 heads, ret = self.gitread('git rev-parse --branches --remotes')
124 128 heads = heads.splitlines()
125 129 else:
126 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
130 heads, ret = self.gitread("git rev-parse --verify %s" %
131 self.revs[0])
127 132 heads = [heads[:-1]]
128 133 if ret:
129 134 raise util.Abort(_('cannot retrieve git heads'))
130 135 return heads
131 136
132 137 def catfile(self, rev, type):
133 138 if rev == hex(nullid):
134 139 raise IOError
135 140 self.catfilepipe[0].write(rev+'\n')
136 141 self.catfilepipe[0].flush()
137 142 info = self.catfilepipe[1].readline().split()
138 143 if info[1] != type:
139 144 raise util.Abort(_('cannot read %r object at %s') % (type, rev))
140 145 size = int(info[2])
141 146 data = self.catfilepipe[1].read(size)
142 147 if len(data) < size:
143 148 raise util.Abort(_('cannot read %r object at %s: unexpected size')
144 149 % (type, rev))
145 150 # read the trailing newline
146 151 self.catfilepipe[1].read(1)
147 152 return data
148 153
149 154 def getfile(self, name, rev):
150 155 if rev == hex(nullid):
151 156 return None, None
152 157 if name == '.hgsub':
153 158 data = '\n'.join([m.hgsub() for m in self.submoditer()])
154 159 mode = ''
155 160 elif name == '.hgsubstate':
156 161 data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
157 162 mode = ''
158 163 else:
159 164 data = self.catfile(rev, "blob")
160 165 mode = self.modecache[(name, rev)]
161 166 return data, mode
162 167
163 168 def submoditer(self):
164 169 null = hex(nullid)
165 170 for m in sorted(self.submodules, key=lambda p: p.path):
166 171 if m.node != null:
167 172 yield m
168 173
169 174 def parsegitmodules(self, content):
170 175 """Parse the formatted .gitmodules file, example file format:
171 176 [submodule "sub"]\n
172 177 \tpath = sub\n
173 178 \turl = git://giturl\n
174 179 """
175 180 self.submodules = []
176 181 c = config.config()
177 182 # Each item in .gitmodules starts with whitespace that can't be parsed
178 183 c.parse('.gitmodules', '\n'.join(line.strip() for line in
179 184 content.split('\n')))
180 185 for sec in c.sections():
181 186 s = c[sec]
182 187 if 'url' in s and 'path' in s:
183 188 self.submodules.append(submodule(s['path'], '', s['url']))
184 189
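A standalone take on the .gitmodules parsing above, with Python 3's configparser standing in for Mercurial's config.config:

import configparser

content = '[submodule "sub"]\n\tpath = sub\n\turl = git://giturl\n'
c = configparser.ConfigParser()
# strip the per-line leading whitespace, as parsegitmodules() does
c.read_string('\n'.join(line.strip() for line in content.split('\n')))
for sec in c.sections():
    print(sec, dict(c[sec]))
# submodule "sub" {'path': 'sub', 'url': 'git://giturl'}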
185 190 def retrievegitmodules(self, version):
186 191 modules, ret = self.gitread("git show %s:%s" % (version, '.gitmodules'))
187 192 if ret:
188 193 # This can happen if a file is in the repo that has permissions
189 194 # 160000, but there is no .gitmodules file.
190 195 self.ui.warn(_("warning: cannot read submodules config file in "
191 196 "%s\n") % version)
192 197 return
193 198
194 199 try:
195 200 self.parsegitmodules(modules)
196 201 except error.ParseError:
197 202 self.ui.warn(_("warning: unable to parse .gitmodules in %s\n")
198 203 % version)
199 204 return
200 205
201 206 for m in self.submodules:
202 207 node, ret = self.gitread("git rev-parse %s:%s" % (version, m.path))
203 208 if ret:
204 209 continue
205 210 m.node = node.strip()
206 211
207 212 def getchanges(self, version, full):
208 213 if full:
209 214 raise util.Abort(_("convert from git does not support --full"))
210 215 self.modecache = {}
211 216 fh = self.gitopen("git diff-tree -z --root -m -r %s %s" % (
212 217 self.simopt, version))
213 218 changes = []
214 219 copies = {}
215 220 seen = set()
216 221 entry = None
217 222 subexists = [False]
218 223 subdeleted = [False]
219 224 difftree = fh.read().split('\x00')
220 225 lcount = len(difftree)
221 226 i = 0
222 227
223 228 def add(entry, f, isdest):
224 229 seen.add(f)
225 230 h = entry[3]
226 231 p = (entry[1] == "100755")
227 232 s = (entry[1] == "120000")
228 233 renamesource = (not isdest and entry[4][0] == 'R')
229 234
230 235 if f == '.gitmodules':
231 236 subexists[0] = True
232 237 if entry[4] == 'D' or renamesource:
233 238 subdeleted[0] = True
234 239 changes.append(('.hgsub', hex(nullid)))
235 240 else:
236 241 changes.append(('.hgsub', ''))
237 242 elif entry[1] == '160000' or entry[0] == ':160000':
238 243 subexists[0] = True
239 244 else:
240 245 if renamesource:
241 246 h = hex(nullid)
242 247 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
243 248 changes.append((f, h))
244 249
245 250 while i < lcount:
246 251 l = difftree[i]
247 252 i += 1
248 253 if not entry:
249 254 if not l.startswith(':'):
250 255 continue
251 256 entry = l.split()
252 257 continue
253 258 f = l
254 259 if f not in seen:
255 260 add(entry, f, False)
256 261 # A file can be copied multiple times, or modified and copied
257 262 # simultaneously. So f can be repeated even if fdest isn't.
258 263 if entry[4][0] in 'RC':
259 264 # rename or copy: next line is the destination
260 265 fdest = difftree[i]
261 266 i += 1
262 267 if fdest not in seen:
263 268 add(entry, fdest, True)
264 269 # .gitmodules isn't imported at all, so it being copied to
265 270 # and fro doesn't really make sense
266 271 if f != '.gitmodules' and fdest != '.gitmodules':
267 272 copies[fdest] = f
268 273 entry = None
269 274 if fh.close():
270 275 raise util.Abort(_('cannot read changes in %s') % version)
271 276
272 277 if subexists[0]:
273 278 if subdeleted[0]:
274 279 changes.append(('.hgsubstate', hex(nullid)))
275 280 else:
276 281 self.retrievegitmodules(version)
277 282 changes.append(('.hgsubstate', ''))
278 283 return (changes, copies, set())
279 284
280 285 def getcommit(self, version):
281 286 c = self.catfile(version, "commit") # read the commit hash
282 287 end = c.find("\n\n")
283 288 message = c[end + 2:]
284 289 message = self.recode(message)
285 290 l = c[:end].splitlines()
286 291 parents = []
287 292 author = committer = None
288 293 for e in l[1:]:
289 294 n, v = e.split(" ", 1)
290 295 if n == "author":
291 296 p = v.split()
292 297 tm, tz = p[-2:]
293 298 author = " ".join(p[:-2])
294 299 if author[0] == "<": author = author[1:-1]
295 300 author = self.recode(author)
296 301 if n == "committer":
297 302 p = v.split()
298 303 tm, tz = p[-2:]
299 304 committer = " ".join(p[:-2])
300 305 if committer[0] == "<": committer = committer[1:-1]
301 306 committer = self.recode(committer)
302 307 if n == "parent":
303 308 parents.append(v)
304 309
305 310 if committer and committer != author:
306 311 message += "\ncommitter: %s\n" % committer
307 312 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
308 313 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
309 314 date = tm + " " + str(tz)
310 315
311 316 c = commit(parents=parents, date=date, author=author, desc=message,
312 317 rev=version)
313 318 return c
314 319
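The date handling in getcommit() above, rerun on a single invented 'author' header line to show the resulting Mercurial-style 'timestamp offset' pair:

line = 'author Jane Doe <jane@example.com> 1434480000 +0200'
p = line.split(' ', 1)[1].split()
tm, tz = p[-2:]                    # '1434480000', '+0200'
author = ' '.join(p[:-2])          # 'Jane Doe <jane@example.com>'
tzs, tzh, tzm = tz[-5:-4] + '1', tz[-4:-2], tz[-2:]
offset = -int(tzs) * (int(tzh) * 3600 + int(tzm))
print(author, tm, offset)          # Jane Doe <jane@example.com> 1434480000 -7200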
315 320 def numcommits(self):
316 321 return len([None for _ in self.gitopen('git rev-list --all')])
317 322
318 323 def gettags(self):
319 324 tags = {}
320 325 alltags = {}
321 326 fh = self.gitopen('git ls-remote --tags "%s"' % self.path,
322 327 err=subprocess.STDOUT)
323 328 prefix = 'refs/tags/'
324 329
325 330 # Build complete list of tags, both annotated and bare ones
326 331 for line in fh:
327 332 line = line.strip()
328 333 if line.startswith("error:") or line.startswith("fatal:"):
329 334 raise util.Abort(_('cannot read tags from %s') % self.path)
330 335 node, tag = line.split(None, 1)
331 336 if not tag.startswith(prefix):
332 337 continue
333 338 alltags[tag[len(prefix):]] = node
334 339 if fh.close():
335 340 raise util.Abort(_('cannot read tags from %s') % self.path)
336 341
337 342 # Filter out tag objects for annotated tag refs
338 343 for tag in alltags:
339 344 if tag.endswith('^{}'):
340 345 tags[tag[:-3]] = alltags[tag]
341 346 else:
342 347 if tag + '^{}' in alltags:
343 348 continue
344 349 else:
345 350 tags[tag] = alltags[tag]
346 351
347 352 return tags
348 353
349 354 def getchangedfiles(self, version, i):
350 355 changes = []
351 356 if i is None:
352 357 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
353 358 for l in fh:
354 359 if "\t" not in l:
355 360 continue
356 361 m, f = l[:-1].split("\t")
357 362 changes.append(f)
358 363 else:
359 364 fh = self.gitopen('git diff-tree --name-only --root -r %s '
360 365 '"%s^%s" --' % (version, version, i + 1))
361 366 changes = [f.rstrip('\n') for f in fh]
362 367 if fh.close():
363 368 raise util.Abort(_('cannot read changes in %s') % version)
364 369
365 370 return changes
366 371
367 372 def getbookmarks(self):
368 373 bookmarks = {}
369 374
370 375 # Interesting references in git are prefixed
371 376 prefix = 'refs/heads/'
372 377 prefixlen = len(prefix)
373 378
374 379 # factor two commands
375 380 gitcmd = { 'remote/': 'git ls-remote --heads origin',
376 381 '': 'git show-ref'}
377 382
378 383 # Origin heads
379 384 for reftype in gitcmd:
380 385 try:
381 386 fh = self.gitopen(gitcmd[reftype], err=subprocess.PIPE)
382 387 for line in fh:
383 388 line = line.strip()
384 389 rev, name = line.split(None, 1)
385 390 if not name.startswith(prefix):
386 391 continue
387 392 name = '%s%s' % (reftype, name[prefixlen:])
388 393 bookmarks[name] = rev
389 394 except Exception:
390 395 pass
391 396
392 397 return bookmarks
393 398
394 399 def checkrevformat(self, revstr, mapname='splicemap'):
395 400 """ git revision string is a 40 byte hex """
396 401 self.checkhexformat(revstr, mapname)
@@ -1,342 +1,342 b''
1 1 # gnuarch.py - GNU Arch support for the convert extension
2 2 #
3 3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 4 # and others
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 from common import NoRepo, commandline, commit, converter_source
10 10 from mercurial.i18n import _
11 11 from mercurial import encoding, util
12 12 import os, shutil, tempfile, stat
13 13 from email.Parser import Parser
14 14
15 15 class gnuarch_source(converter_source, commandline):
16 16
17 17 class gnuarch_rev(object):
18 18 def __init__(self, rev):
19 19 self.rev = rev
20 20 self.summary = ''
21 21 self.date = None
22 22 self.author = ''
23 23 self.continuationof = None
24 24 self.add_files = []
25 25 self.mod_files = []
26 26 self.del_files = []
27 27 self.ren_files = {}
28 28 self.ren_dirs = {}
29 29
30 def __init__(self, ui, path, rev=None):
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
30 def __init__(self, ui, path, revs=None):
31 super(gnuarch_source, self).__init__(ui, path, revs=revs)
32 32
33 33 if not os.path.exists(os.path.join(path, '{arch}')):
34 34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 35 % path)
36 36
37 37 # Could use checktool, but we want to check for baz or tla.
38 38 self.execmd = None
39 39 if util.findexe('baz'):
40 40 self.execmd = 'baz'
41 41 else:
42 42 if util.findexe('tla'):
43 43 self.execmd = 'tla'
44 44 else:
45 45 raise util.Abort(_('cannot find a GNU Arch tool'))
46 46
47 47 commandline.__init__(self, ui, self.execmd)
48 48
49 49 self.path = os.path.realpath(path)
50 50 self.tmppath = None
51 51
52 52 self.treeversion = None
53 53 self.lastrev = None
54 54 self.changes = {}
55 55 self.parents = {}
56 56 self.tags = {}
57 57 self.catlogparser = Parser()
58 58 self.encoding = encoding.encoding
59 59 self.archives = []
60 60
61 61 def before(self):
62 62 # Get registered archives
63 63 self.archives = [i.rstrip('\n')
64 64 for i in self.runlines0('archives', '-n')]
65 65
66 66 if self.execmd == 'tla':
67 67 output = self.run0('tree-version', self.path)
68 68 else:
69 69 output = self.run0('tree-version', '-d', self.path)
70 70 self.treeversion = output.strip()
71 71
72 72 # Get name of temporary directory
73 73 version = self.treeversion.split('/')
74 74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 75 'hg-%s' % version[1])
76 76
77 77 # Generate parents dictionary
78 78 self.parents[None] = []
79 79 treeversion = self.treeversion
80 80 child = None
81 81 while treeversion:
82 82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83 83
84 84 archive = treeversion.split('/')[0]
85 85 if archive not in self.archives:
86 86 self.ui.status(_('tree analysis stopped because it points to '
87 87 'an unregistered archive %s...\n') % archive)
88 88 break
89 89
90 90 # Get the complete list of revisions for that tree version
91 91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 92 self.checkexit(status, 'failed retrieving revisions for %s'
93 93 % treeversion)
94 94
95 95 # No new iteration unless a revision has a continuation-of header
96 96 treeversion = None
97 97
98 98 for l in output:
99 99 rev = l.strip()
100 100 self.changes[rev] = self.gnuarch_rev(rev)
101 101 self.parents[rev] = []
102 102
103 103 # Read author, date and summary
104 104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 105 if status:
106 106 catlog = self.run0('cat-archive-log', rev)
107 107 self._parsecatlog(catlog, rev)
108 108
109 109 # Populate the parents map
110 110 self.parents[child].append(rev)
111 111
112 112 # Keep track of the current revision as the child of the next
113 113 # revision scanned
114 114 child = rev
115 115
116 116 # Check if we have to follow the usual incremental history
117 117 # or if we have to 'jump' to a different treeversion given
118 118 # by the continuation-of header.
119 119 if self.changes[rev].continuationof:
120 120 treeversion = '--'.join(
121 121 self.changes[rev].continuationof.split('--')[:-1])
122 122 break
123 123
124 124 # If we reached a base-0 revision w/o any continuation-of
125 125 # header, it means the tree history ends here.
126 126 if rev[-6:] == 'base-0':
127 127 break
128 128
129 129 def after(self):
130 130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 131 shutil.rmtree(self.tmppath, ignore_errors=True)
132 132
133 133 def getheads(self):
134 134 return self.parents[None]
135 135
136 136 def getfile(self, name, rev):
137 137 if rev != self.lastrev:
138 138 raise util.Abort(_('internal calling inconsistency'))
139 139
140 140 if not os.path.lexists(os.path.join(self.tmppath, name)):
141 141 return None, None
142 142
143 143 return self._getfile(name, rev)
144 144
145 145 def getchanges(self, rev, full):
146 146 if full:
147 147 raise util.Abort(_("convert from arch does not support --full"))
148 148 self._update(rev)
149 149 changes = []
150 150 copies = {}
151 151
152 152 for f in self.changes[rev].add_files:
153 153 changes.append((f, rev))
154 154
155 155 for f in self.changes[rev].mod_files:
156 156 changes.append((f, rev))
157 157
158 158 for f in self.changes[rev].del_files:
159 159 changes.append((f, rev))
160 160
161 161 for src in self.changes[rev].ren_files:
162 162 to = self.changes[rev].ren_files[src]
163 163 changes.append((src, rev))
164 164 changes.append((to, rev))
165 165 copies[to] = src
166 166
167 167 for src in self.changes[rev].ren_dirs:
168 168 to = self.changes[rev].ren_dirs[src]
169 169 chgs, cps = self._rendirchanges(src, to)
170 170 changes += [(f, rev) for f in chgs]
171 171 copies.update(cps)
172 172
173 173 self.lastrev = rev
174 174 return sorted(set(changes)), copies, set()
175 175
176 176 def getcommit(self, rev):
177 177 changes = self.changes[rev]
178 178 return commit(author=changes.author, date=changes.date,
179 179 desc=changes.summary, parents=self.parents[rev], rev=rev)
180 180
181 181 def gettags(self):
182 182 return self.tags
183 183
184 184 def _execute(self, cmd, *args, **kwargs):
185 185 cmdline = [self.execmd, cmd]
186 186 cmdline += args
187 187 cmdline = [util.shellquote(arg) for arg in cmdline]
188 188 cmdline += ['>', os.devnull, '2>', os.devnull]
189 189 cmdline = util.quotecommand(' '.join(cmdline))
190 190 self.ui.debug(cmdline, '\n')
191 191 return os.system(cmdline)
192 192
193 193 def _update(self, rev):
194 194 self.ui.debug('applying revision %s...\n' % rev)
195 195 changeset, status = self.runlines('replay', '-d', self.tmppath,
196 196 rev)
197 197 if status:
198 198 # Something went wrong while merging (baz or tla
199 199 # issue?), get latest revision and try from there
200 200 shutil.rmtree(self.tmppath, ignore_errors=True)
201 201 self._obtainrevision(rev)
202 202 else:
203 203 old_rev = self.parents[rev][0]
204 204 self.ui.debug('computing changeset between %s and %s...\n'
205 205 % (old_rev, rev))
206 206 self._parsechangeset(changeset, rev)
207 207
208 208 def _getfile(self, name, rev):
209 209 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
210 210 if stat.S_ISLNK(mode):
211 211 data = os.readlink(os.path.join(self.tmppath, name))
212 212 if mode:
213 213 mode = 'l'
214 214 else:
215 215 mode = ''
216 216 else:
217 217 data = open(os.path.join(self.tmppath, name), 'rb').read()
218 218 mode = (mode & 0o111) and 'x' or ''
219 219 return data, mode
220 220
221 221 def _exclude(self, name):
222 222 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
223 223 for exc in exclude:
224 224 if name.find(exc) != -1:
225 225 return True
226 226 return False
227 227
228 228 def _readcontents(self, path):
229 229 files = []
230 230 contents = os.listdir(path)
231 231 while len(contents) > 0:
232 232 c = contents.pop()
233 233 p = os.path.join(path, c)
234 234 # os.walk could be used, but here we avoid internal GNU
235 235 # Arch files and directories, thus saving a lot of time.
236 236 if not self._exclude(p):
237 237 if os.path.isdir(p):
238 238 contents += [os.path.join(c, f) for f in os.listdir(p)]
239 239 else:
240 240 files.append(c)
241 241 return files
242 242
243 243 def _rendirchanges(self, src, dest):
244 244 changes = []
245 245 copies = {}
246 246 files = self._readcontents(os.path.join(self.tmppath, dest))
247 247 for f in files:
248 248 s = os.path.join(src, f)
249 249 d = os.path.join(dest, f)
250 250 changes.append(s)
251 251 changes.append(d)
252 252 copies[d] = s
253 253 return changes, copies
254 254
255 255 def _obtainrevision(self, rev):
256 256 self.ui.debug('obtaining revision %s...\n' % rev)
257 257 output = self._execute('get', rev, self.tmppath)
258 258 self.checkexit(output)
259 259 self.ui.debug('analyzing revision %s...\n' % rev)
260 260 files = self._readcontents(self.tmppath)
261 261 self.changes[rev].add_files += files
262 262
263 263 def _stripbasepath(self, path):
264 264 if path.startswith('./'):
265 265 return path[2:]
266 266 return path
267 267
268 268 def _parsecatlog(self, data, rev):
269 269 try:
270 270 catlog = self.catlogparser.parsestr(data)
271 271
272 272 # Commit date
273 273 self.changes[rev].date = util.datestr(
274 274 util.strdate(catlog['Standard-date'],
275 275 '%Y-%m-%d %H:%M:%S'))
276 276
277 277 # Commit author
278 278 self.changes[rev].author = self.recode(catlog['Creator'])
279 279
280 280 # Commit description
281 281 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
282 282 catlog.get_payload()))
283 283 self.changes[rev].summary = self.recode(self.changes[rev].summary)
284 284
285 285 # Commit revision origin when dealing with a branch or tag
286 286 if 'Continuation-of' in catlog:
287 287 self.changes[rev].continuationof = self.recode(
288 288 catlog['Continuation-of'])
289 289 except Exception:
290 290 raise util.Abort(_('could not parse cat-log of %s') % rev)
291 291
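A runnable sketch of the cat-log parsing above, feeding the stdlib email parser an invented log entry with the headers _parsecatlog() reads:

from email.parser import Parser

catlog = '''Creator: Jane Doe <jane@example.com>
Standard-date: 2015-06-16 12:00:00 GMT
Summary: initial import

Longer description of the revision.
'''
msg = Parser().parsestr(catlog)
print(msg['Creator'])        # Jane Doe <jane@example.com>
print(msg['Standard-date'])  # 2015-06-16 12:00:00 GMT
print('\n\n'.join((msg['Summary'], msg.get_payload())))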
292 292 def _parsechangeset(self, data, rev):
293 293 for l in data:
294 294 l = l.strip()
295 295 # Added file (ignore added directory)
296 296 if l.startswith('A') and not l.startswith('A/'):
297 297 file = self._stripbasepath(l[1:].strip())
298 298 if not self._exclude(file):
299 299 self.changes[rev].add_files.append(file)
300 300 # Deleted file (ignore deleted directory)
301 301 elif l.startswith('D') and not l.startswith('D/'):
302 302 file = self._stripbasepath(l[1:].strip())
303 303 if not self._exclude(file):
304 304 self.changes[rev].del_files.append(file)
305 305 # Modified binary file
306 306 elif l.startswith('Mb'):
307 307 file = self._stripbasepath(l[2:].strip())
308 308 if not self._exclude(file):
309 309 self.changes[rev].mod_files.append(file)
310 310 # Modified link
311 311 elif l.startswith('M->'):
312 312 file = self._stripbasepath(l[3:].strip())
313 313 if not self._exclude(file):
314 314 self.changes[rev].mod_files.append(file)
315 315 # Modified file
316 316 elif l.startswith('M'):
317 317 file = self._stripbasepath(l[1:].strip())
318 318 if not self._exclude(file):
319 319 self.changes[rev].mod_files.append(file)
320 320 # Renamed file (or link)
321 321 elif l.startswith('=>'):
322 322 files = l[2:].strip().split(' ')
323 323 if len(files) == 1:
324 324 files = l[2:].strip().split('\t')
325 325 src = self._stripbasepath(files[0])
326 326 dst = self._stripbasepath(files[1])
327 327 if not self._exclude(src) and not self._exclude(dst):
328 328 self.changes[rev].ren_files[src] = dst
329 329 # Conversion from file to link or from link to file (modified)
330 330 elif l.startswith('ch'):
331 331 file = self._stripbasepath(l[2:].strip())
332 332 if not self._exclude(file):
333 333 self.changes[rev].mod_files.append(file)
334 334 # Renamed directory
335 335 elif l.startswith('/>'):
336 336 dirs = l[2:].strip().split(' ')
337 337 if len(dirs) == 1:
338 338 dirs = l[2:].strip().split('\t')
339 339 src = self._stripbasepath(dirs[0])
340 340 dst = self._stripbasepath(dirs[1])
341 341 if not self._exclude(src) and not self._exclude(dst):
342 342 self.changes[rev].ren_dirs[src] = dst
@@ -1,561 +1,564 b''
1 1 # hg.py - hg backend for convert extension
2 2 #
3 3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 # Notes for hg->hg conversion:
9 9 #
10 10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 11 # of commit messages, but new versions do. Changesets created by
12 12 # those older versions, then converted, may thus have different
13 13 # hashes for changesets that are otherwise identical.
14 14 #
15 15 # * Using "--config convert.hg.saverev=true" will make the source
16 16 # identifier to be stored in the converted revision. This will cause
17 17 # the converted revision to have a different identity than the
18 18 # source.
19 19
20 20
21 21 import os, time, cStringIO
22 22 from mercurial.i18n import _
23 23 from mercurial.node import bin, hex, nullid
24 24 from mercurial import hg, util, context, bookmarks, error, scmutil, exchange
25 25 from mercurial import phases
26 26
27 27 from common import NoRepo, commit, converter_source, converter_sink, mapfile
28 28
29 29 import re
30 30 sha1re = re.compile(r'\b[0-9a-f]{12,40}\b')
31 31
32 32 class mercurial_sink(converter_sink):
33 33 def __init__(self, ui, path):
34 34 converter_sink.__init__(self, ui, path)
35 35 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
36 36 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
37 37 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
38 38 self.lastbranch = None
39 39 if os.path.isdir(path) and len(os.listdir(path)) > 0:
40 40 try:
41 41 self.repo = hg.repository(self.ui, path)
42 42 if not self.repo.local():
43 43 raise NoRepo(_('%s is not a local Mercurial repository')
44 44 % path)
45 45 except error.RepoError as err:
46 46 ui.traceback()
47 47 raise NoRepo(err.args[0])
48 48 else:
49 49 try:
50 50 ui.status(_('initializing destination %s repository\n') % path)
51 51 self.repo = hg.repository(self.ui, path, create=True)
52 52 if not self.repo.local():
53 53 raise NoRepo(_('%s is not a local Mercurial repository')
54 54 % path)
55 55 self.created.append(path)
56 56 except error.RepoError:
57 57 ui.traceback()
58 58 raise NoRepo(_("could not create hg repository %s as sink")
59 59 % path)
60 60 self.lock = None
61 61 self.wlock = None
62 62 self.filemapmode = False
63 63 self.subrevmaps = {}
64 64
65 65 def before(self):
66 66 self.ui.debug('run hg sink pre-conversion action\n')
67 67 self.wlock = self.repo.wlock()
68 68 self.lock = self.repo.lock()
69 69
70 70 def after(self):
71 71 self.ui.debug('run hg sink post-conversion action\n')
72 72 if self.lock:
73 73 self.lock.release()
74 74 if self.wlock:
75 75 self.wlock.release()
76 76
77 77 def revmapfile(self):
78 78 return self.repo.join("shamap")
79 79
80 80 def authorfile(self):
81 81 return self.repo.join("authormap")
82 82
83 83 def setbranch(self, branch, pbranches):
84 84 if not self.clonebranches:
85 85 return
86 86
87 87 setbranch = (branch != self.lastbranch)
88 88 self.lastbranch = branch
89 89 if not branch:
90 90 branch = 'default'
91 91 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
92 92 if pbranches:
93 93 pbranch = pbranches[0][1]
94 94 else:
95 95 pbranch = 'default'
96 96
97 97 branchpath = os.path.join(self.path, branch)
98 98 if setbranch:
99 99 self.after()
100 100 try:
101 101 self.repo = hg.repository(self.ui, branchpath)
102 102 except Exception:
103 103 self.repo = hg.repository(self.ui, branchpath, create=True)
104 104 self.before()
105 105
106 106 # pbranches may bring revisions from other branches (merge parents)
107 107 # Make sure we have them, or pull them.
108 108 missings = {}
109 109 for b in pbranches:
110 110 try:
111 111 self.repo.lookup(b[0])
112 112 except Exception:
113 113 missings.setdefault(b[1], []).append(b[0])
114 114
115 115 if missings:
116 116 self.after()
117 117 for pbranch, heads in sorted(missings.iteritems()):
118 118 pbranchpath = os.path.join(self.path, pbranch)
119 119 prepo = hg.peer(self.ui, {}, pbranchpath)
120 120 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
121 121 exchange.pull(self.repo, prepo,
122 122 [prepo.lookup(h) for h in heads])
123 123 self.before()
124 124
125 125 def _rewritetags(self, source, revmap, data):
126 126 fp = cStringIO.StringIO()
127 127 for line in data.splitlines():
128 128 s = line.split(' ', 1)
129 129 if len(s) != 2:
130 130 continue
131 131 revid = revmap.get(source.lookuprev(s[0]))
132 132 if not revid:
133 133 if s[0] == hex(nullid):
134 134 revid = s[0]
135 135 else:
136 136 continue
137 137 fp.write('%s %s\n' % (revid, s[1]))
138 138 return fp.getvalue()
139 139
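A trimmed, standalone miniature of the .hgtags rewriting above (the real method also consults source.lookuprev() and passes nullid through); the ids are shortened and invented:

def rewritetags(data, revmap):
    lines = []
    for line in data.splitlines():
        s = line.split(' ', 1)
        if len(s) != 2:
            continue
        revid = revmap.get(s[0])
        if not revid:
            continue            # the tagged revision was never converted
        lines.append('%s %s\n' % (revid, s[1]))
    return ''.join(lines)

print(rewritetags('aaa111 v1.0\nbbb222 v1.1\n', {'aaa111': 'fff999'}))
# 'fff999 v1.0\n' -- v1.1 is dropped because bbb222 has no converted counterpart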
140 140 def _rewritesubstate(self, source, data):
141 141 fp = cStringIO.StringIO()
142 142 for line in data.splitlines():
143 143 s = line.split(' ', 1)
144 144 if len(s) != 2:
145 145 continue
146 146
147 147 revid = s[0]
148 148 subpath = s[1]
149 149 if revid != hex(nullid):
150 150 revmap = self.subrevmaps.get(subpath)
151 151 if revmap is None:
152 152 revmap = mapfile(self.ui,
153 153 self.repo.wjoin(subpath, '.hg/shamap'))
154 154 self.subrevmaps[subpath] = revmap
155 155
156 156 # It is reasonable that one or more of the subrepos don't
157 157 # need to be converted, in which case they can be cloned
158 158 # into place instead of converted. Therefore, only warn
159 159 # once.
160 160 msg = _('no ".hgsubstate" updates will be made for "%s"\n')
161 161 if len(revmap) == 0:
162 162 sub = self.repo.wvfs.reljoin(subpath, '.hg')
163 163
164 164 if self.repo.wvfs.exists(sub):
165 165 self.ui.warn(msg % subpath)
166 166
167 167 newid = revmap.get(revid)
168 168 if not newid:
169 169 if len(revmap) > 0:
170 170 self.ui.warn(_("%s is missing from %s/.hg/shamap\n") %
171 171 (revid, subpath))
172 172 else:
173 173 revid = newid
174 174
175 175 fp.write('%s %s\n' % (revid, subpath))
176 176
177 177 return fp.getvalue()
178 178
179 179 def putcommit(self, files, copies, parents, commit, source, revmap, full,
180 180 cleanp2):
181 181 files = dict(files)
182 182
183 183 def getfilectx(repo, memctx, f):
184 184 if p2ctx and f in cleanp2 and f not in copies:
185 185 self.ui.debug('reusing %s from p2\n' % f)
186 186 return p2ctx[f]
187 187 try:
188 188 v = files[f]
189 189 except KeyError:
190 190 return None
191 191 data, mode = source.getfile(f, v)
192 192 if data is None:
193 193 return None
194 194 if f == '.hgtags':
195 195 data = self._rewritetags(source, revmap, data)
196 196 if f == '.hgsubstate':
197 197 data = self._rewritesubstate(source, data)
198 198 return context.memfilectx(self.repo, f, data, 'l' in mode,
199 199 'x' in mode, copies.get(f))
200 200
201 201 pl = []
202 202 for p in parents:
203 203 if p not in pl:
204 204 pl.append(p)
205 205 parents = pl
206 206 nparents = len(parents)
207 207 if self.filemapmode and nparents == 1:
208 208 m1node = self.repo.changelog.read(bin(parents[0]))[0]
209 209 parent = parents[0]
210 210
211 211 if len(parents) < 2:
212 212 parents.append(nullid)
213 213 if len(parents) < 2:
214 214 parents.append(nullid)
215 215 p2 = parents.pop(0)
216 216
217 217 text = commit.desc
218 218
219 219 sha1s = re.findall(sha1re, text)
220 220 for sha1 in sha1s:
221 221 oldrev = source.lookuprev(sha1)
222 222 newrev = revmap.get(oldrev)
223 223 if newrev is not None:
224 224 text = text.replace(sha1, newrev[:len(sha1)])
225 225
226 226 extra = commit.extra.copy()
227 227
228 228 for label in ('source', 'transplant_source', 'rebase_source',
229 229 'intermediate-source'):
230 230 node = extra.get(label)
231 231
232 232 if node is None:
233 233 continue
234 234
235 235 # Only transplant stores its reference in binary
236 236 if label == 'transplant_source':
237 237 node = hex(node)
238 238
239 239 newrev = revmap.get(node)
240 240 if newrev is not None:
241 241 if label == 'transplant_source':
242 242 newrev = bin(newrev)
243 243
244 244 extra[label] = newrev
245 245
246 246 if self.branchnames and commit.branch:
247 247 extra['branch'] = commit.branch
248 248 if commit.rev and commit.saverev:
249 249 extra['convert_revision'] = commit.rev
250 250
251 251 while parents:
252 252 p1 = p2
253 253 p2 = parents.pop(0)
254 254 p2ctx = None
255 255 if p2 != nullid:
256 256 p2ctx = self.repo[p2]
257 257 fileset = set(files)
258 258 if full:
259 259 fileset.update(self.repo[p1])
260 260 fileset.update(self.repo[p2])
261 261 ctx = context.memctx(self.repo, (p1, p2), text, fileset,
262 262 getfilectx, commit.author, commit.date, extra)
263 263
264 264 # We won't know if the conversion changes the node until after the
265 265 # commit, so copy the source's phase for now.
266 266 self.repo.ui.setconfig('phases', 'new-commit',
267 267 phases.phasenames[commit.phase], 'convert')
268 268
269 269 tr = self.repo.transaction("convert")
270 270
271 271 try:
272 272 node = hex(self.repo.commitctx(ctx))
273 273
274 274 # If the node value has changed, but the phase is lower than
275 275 # draft, set it back to draft since it hasn't been exposed
276 276 # anywhere.
277 277 if commit.rev != node:
278 278 ctx = self.repo[node]
279 279 if ctx.phase() < phases.draft:
280 280 phases.retractboundary(self.repo, tr, phases.draft,
281 281 [ctx.node()])
282 282 tr.close()
283 283 finally:
284 284 tr.release()
285 285
286 286 text = "(octopus merge fixup)\n"
287 287 p2 = node
288 288
289 289 if self.filemapmode and nparents == 1:
290 290 man = self.repo.manifest
291 291 mnode = self.repo.changelog.read(bin(p2))[0]
292 292 closed = 'close' in commit.extra
293 293 if not closed and not man.cmp(m1node, man.revision(mnode)):
294 294 self.ui.status(_("filtering out empty revision\n"))
295 295 self.repo.rollback(force=True)
296 296 return parent
297 297 return p2
298 298
299 299 def puttags(self, tags):
300 300 try:
301 301 parentctx = self.repo[self.tagsbranch]
302 302 tagparent = parentctx.node()
303 303 except error.RepoError:
304 304 parentctx = None
305 305 tagparent = nullid
306 306
307 307 oldlines = set()
308 308 for branch, heads in self.repo.branchmap().iteritems():
309 309 for h in heads:
310 310 if '.hgtags' in self.repo[h]:
311 311 oldlines.update(
312 312 set(self.repo[h]['.hgtags'].data().splitlines(True)))
313 313 oldlines = sorted(list(oldlines))
314 314
315 315 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
316 316 if newlines == oldlines:
317 317 return None, None
318 318
319 319 # if the old and new tags match, then there is nothing to update
320 320 oldtags = set()
321 321 newtags = set()
322 322 for line in oldlines:
323 323 s = line.strip().split(' ', 1)
324 324 if len(s) != 2:
325 325 continue
326 326 oldtags.add(s[1])
327 327 for line in newlines:
328 328 s = line.strip().split(' ', 1)
329 329 if len(s) != 2:
330 330 continue
331 331 if s[1] not in oldtags:
332 332 newtags.add(s[1].strip())
333 333
334 334 if not newtags:
335 335 return None, None
336 336
337 337 data = "".join(newlines)
338 338 def getfilectx(repo, memctx, f):
339 339 return context.memfilectx(repo, f, data, False, False, None)
340 340
341 341 self.ui.status(_("updating tags\n"))
342 342 date = "%s 0" % int(time.mktime(time.gmtime()))
343 343 extra = {'branch': self.tagsbranch}
344 344 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
345 345 [".hgtags"], getfilectx, "convert-repo", date,
346 346 extra)
347 347 node = self.repo.commitctx(ctx)
348 348 return hex(node), hex(tagparent)
349 349
350 350 def setfilemapmode(self, active):
351 351 self.filemapmode = active
352 352
353 353 def putbookmarks(self, updatedbookmark):
354 354 if not len(updatedbookmark):
355 355 return
356 356
357 357 self.ui.status(_("updating bookmarks\n"))
358 358 destmarks = self.repo._bookmarks
359 359 for bookmark in updatedbookmark:
360 360 destmarks[bookmark] = bin(updatedbookmark[bookmark])
361 361 destmarks.write()
362 362
363 363 def hascommitfrommap(self, rev):
364 364 # the exact semantics of clonebranches is unclear so we can't say no
365 365 return rev in self.repo or self.clonebranches
366 366
367 367 def hascommitforsplicemap(self, rev):
368 368 if rev not in self.repo and self.clonebranches:
369 369 raise util.Abort(_('revision %s not found in destination '
370 370 'repository (lookups with clonebranches=true '
371 371 'are not implemented)') % rev)
372 372 return rev in self.repo
373 373
374 374 class mercurial_source(converter_source):
375 def __init__(self, ui, path, rev=None):
376 converter_source.__init__(self, ui, path, rev)
375 def __init__(self, ui, path, revs=None):
376 converter_source.__init__(self, ui, path, revs)
377 if revs and len(revs) > 1:
378 raise util.Abort(_("mercurial source does not support specifying "
379 "multiple revisions"))
377 380 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
378 381 self.ignored = set()
379 382 self.saverev = ui.configbool('convert', 'hg.saverev', False)
380 383 try:
381 384 self.repo = hg.repository(self.ui, path)
382 385 # try to provoke an exception if this isn't really a hg
383 386 # repo, but some other bogus compatible-looking url
384 387 if not self.repo.local():
385 388 raise error.RepoError
386 389 except error.RepoError:
387 390 ui.traceback()
388 391 raise NoRepo(_("%s is not a local Mercurial repository") % path)
389 392 self.lastrev = None
390 393 self.lastctx = None
391 394 self._changescache = None, None
392 395 self.convertfp = None
393 396 # Restrict converted revisions to startrev descendants
394 397 startnode = ui.config('convert', 'hg.startrev')
395 398 hgrevs = ui.config('convert', 'hg.revs')
396 399 if hgrevs is None:
397 400 if startnode is not None:
398 401 try:
399 402 startnode = self.repo.lookup(startnode)
400 403 except error.RepoError:
401 404 raise util.Abort(_('%s is not a valid start revision')
402 405 % startnode)
403 406 startrev = self.repo.changelog.rev(startnode)
404 407 children = {startnode: 1}
405 408 for r in self.repo.changelog.descendants([startrev]):
406 409 children[self.repo.changelog.node(r)] = 1
407 410 self.keep = children.__contains__
408 411 else:
409 412 self.keep = util.always
410 if rev:
411 self._heads = [self.repo[rev].node()]
413 if revs:
414 self._heads = [self.repo[revs[0]].node()]
412 415 else:
413 416 self._heads = self.repo.heads()
414 417 else:
415 if rev or startnode is not None:
418 if revs or startnode is not None:
416 419 raise util.Abort(_('hg.revs cannot be combined with '
417 420 'hg.startrev or --rev'))
418 421 nodes = set()
419 422 parents = set()
420 423 for r in scmutil.revrange(self.repo, [hgrevs]):
421 424 ctx = self.repo[r]
422 425 nodes.add(ctx.node())
423 426 parents.update(p.node() for p in ctx.parents())
424 427 self.keep = nodes.__contains__
425 428 self._heads = nodes - parents
426 429
427 430 def changectx(self, rev):
428 431 if self.lastrev != rev:
429 432 self.lastctx = self.repo[rev]
430 433 self.lastrev = rev
431 434 return self.lastctx
432 435
433 436 def parents(self, ctx):
434 437 return [p for p in ctx.parents() if p and self.keep(p.node())]
435 438
436 439 def getheads(self):
437 440 return [hex(h) for h in self._heads if self.keep(h)]
438 441
439 442 def getfile(self, name, rev):
440 443 try:
441 444 fctx = self.changectx(rev)[name]
442 445 return fctx.data(), fctx.flags()
443 446 except error.LookupError:
444 447 return None, None
445 448
446 449 def getchanges(self, rev, full):
447 450 ctx = self.changectx(rev)
448 451 parents = self.parents(ctx)
449 452 if full or not parents:
450 453 files = copyfiles = ctx.manifest()
451 454 if parents:
452 455 if self._changescache[0] == rev:
453 456 m, a, r = self._changescache[1]
454 457 else:
455 458 m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
456 459 if not full:
457 460 files = m + a + r
458 461 copyfiles = m + a
459 462 # getcopies() is also run for roots and before filtering so missing
460 463 # revlogs are detected early
461 464 copies = self.getcopies(ctx, parents, copyfiles)
462 465 cleanp2 = set()
463 466 if len(parents) == 2:
464 467 cleanp2.update(self.repo.status(parents[1].node(), ctx.node(),
465 468 clean=True).clean)
466 469 changes = [(f, rev) for f in files if f not in self.ignored]
467 470 changes.sort()
468 471 return changes, copies, cleanp2
469 472
470 473 def getcopies(self, ctx, parents, files):
471 474 copies = {}
472 475 for name in files:
473 476 if name in self.ignored:
474 477 continue
475 478 try:
476 479 copysource, _copynode = ctx.filectx(name).renamed()
477 480 if copysource in self.ignored:
478 481 continue
479 482 # Ignore copy sources not in parent revisions
480 483 found = False
481 484 for p in parents:
482 485 if copysource in p:
483 486 found = True
484 487 break
485 488 if not found:
486 489 continue
487 490 copies[name] = copysource
488 491 except TypeError:
489 492 pass
490 493 except error.LookupError as e:
491 494 if not self.ignoreerrors:
492 495 raise
493 496 self.ignored.add(name)
494 497 self.ui.warn(_('ignoring: %s\n') % e)
495 498 return copies
496 499
497 500 def getcommit(self, rev):
498 501 ctx = self.changectx(rev)
499 502 parents = [p.hex() for p in self.parents(ctx)]
500 503 crev = rev
501 504
502 505 return commit(author=ctx.user(),
503 506 date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
504 507 desc=ctx.description(), rev=crev, parents=parents,
505 508 branch=ctx.branch(), extra=ctx.extra(),
506 509 sortkey=ctx.rev(), saverev=self.saverev,
507 510 phase=ctx.phase())
508 511
509 512 def gettags(self):
510 513 # This will get written to .hgtags, filter non global tags out.
511 514 tags = [t for t in self.repo.tagslist()
512 515 if self.repo.tagtype(t[0]) == 'global']
513 516 return dict([(name, hex(node)) for name, node in tags
514 517 if self.keep(node)])
515 518
516 519 def getchangedfiles(self, rev, i):
517 520 ctx = self.changectx(rev)
518 521 parents = self.parents(ctx)
519 522 if not parents and i is None:
520 523 i = 0
521 524 changes = [], ctx.manifest().keys(), []
522 525 else:
523 526 i = i or 0
524 527 changes = self.repo.status(parents[i].node(), ctx.node())[:3]
525 528 changes = [[f for f in l if f not in self.ignored] for l in changes]
526 529
527 530 if i == 0:
528 531 self._changescache = (rev, changes)
529 532
530 533 return changes[0] + changes[1] + changes[2]
531 534
532 535 def converted(self, rev, destrev):
533 536 if self.convertfp is None:
534 537 self.convertfp = open(self.repo.join('shamap'), 'a')
535 538 self.convertfp.write('%s %s\n' % (destrev, rev))
536 539 self.convertfp.flush()
537 540
538 541 def before(self):
539 542 self.ui.debug('run hg source pre-conversion action\n')
540 543
541 544 def after(self):
542 545 self.ui.debug('run hg source post-conversion action\n')
543 546
544 547 def hasnativeorder(self):
545 548 return True
546 549
547 550 def hasnativeclose(self):
548 551 return True
549 552
550 553 def lookuprev(self, rev):
551 554 try:
552 555 return hex(self.repo.lookup(rev))
553 556 except (error.RepoError, error.LookupError):
554 557 return None
555 558
556 559 def getbookmarks(self):
557 560 return bookmarks.listbookmarks(self.repo)
558 561
559 562 def checkrevformat(self, revstr, mapname='splicemap'):
560 563 """ Mercurial, revision string is a 40 byte hex """
561 564 self.checkhexformat(revstr, mapname)
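Editorial note: the hg source above, like the monotone, p4 and subversion sources that follow, now takes a revs list instead of a single rev and rejects more than one value. Below is a minimal standalone sketch of that guard pattern; the class name, the ValueError and the placeholder heads are illustrative stand-ins for the extension's converter_source/util.Abort machinery, not the real classes.

# Hedged sketch of the single-revision guard adopted by these sources.
class SingleRevSource(object):
    def __init__(self, path, revs=None):
        # 'revs' is a list because more than one revision may now be
        # supplied; sources that can only follow one head reject extras.
        if revs and len(revs) > 1:
            raise ValueError("this source does not support specifying "
                             "multiple revisions")
        self.path = path
        self.revs = revs or []

    def getheads(self):
        # With at most one entry, the old single-rev behaviour is kept.
        if self.revs:
            return list(self.revs)
        return ['<all repository heads>']

if __name__ == '__main__':
    print(SingleRevSource('/tmp/repo', revs=['tip']).getheads())   # accepted
    try:
        SingleRevSource('/tmp/repo', revs=['1.0', '2.0'])           # rejected
    except ValueError as err:
        print(err)
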
@@ -1,361 +1,364 b''
1 1 # monotone.py - monotone support for the convert extension
2 2 #
3 3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
4 4 # others
5 5 #
6 6 # This software may be used and distributed according to the terms of the
7 7 # GNU General Public License version 2 or any later version.
8 8
9 9 import os, re
10 10 from mercurial import util
11 11 from common import NoRepo, commit, converter_source, checktool
12 12 from common import commandline
13 13 from mercurial.i18n import _
14 14
15 15 class monotone_source(converter_source, commandline):
16 def __init__(self, ui, path=None, rev=None):
17 converter_source.__init__(self, ui, path, rev)
16 def __init__(self, ui, path=None, revs=None):
17 converter_source.__init__(self, ui, path, revs)
18 if revs and len(revs) > 1:
19 raise util.Abort(_('monotone source does not support specifying '
20 'multiple revs'))
18 21 commandline.__init__(self, ui, 'mtn')
19 22
20 23 self.ui = ui
21 24 self.path = path
22 25 self.automatestdio = False
23 self.rev = rev
26 self.revs = revs
24 27
25 28 norepo = NoRepo(_("%s does not look like a monotone repository")
26 29 % path)
27 30 if not os.path.exists(os.path.join(path, '_MTN')):
28 31 # Could be a monotone repository (SQLite db file)
29 32 try:
30 33 f = file(path, 'rb')
31 34 header = f.read(16)
32 35 f.close()
33 36 except IOError:
34 37 header = ''
35 38 if header != 'SQLite format 3\x00':
36 39 raise norepo
37 40
38 41 # regular expressions for parsing monotone output
39 42 space = r'\s*'
40 43 name = r'\s+"((?:\\"|[^"])*)"\s*'
41 44 value = name
42 45 revision = r'\s+\[(\w+)\]\s*'
43 46 lines = r'(?:.|\n)+'
44 47
45 48 self.dir_re = re.compile(space + "dir" + name)
46 49 self.file_re = re.compile(space + "file" + name +
47 50 "content" + revision)
48 51 self.add_file_re = re.compile(space + "add_file" + name +
49 52 "content" + revision)
50 53 self.patch_re = re.compile(space + "patch" + name +
51 54 "from" + revision + "to" + revision)
52 55 self.rename_re = re.compile(space + "rename" + name + "to" + name)
53 56 self.delete_re = re.compile(space + "delete" + name)
54 57 self.tag_re = re.compile(space + "tag" + name + "revision" +
55 58 revision)
56 59 self.cert_re = re.compile(lines + space + "name" + name +
57 60 "value" + value)
58 61
59 62 attr = space + "file" + lines + space + "attr" + space
60 63 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
61 64 space + '"true"')
62 65
63 66 # cached data
64 67 self.manifest_rev = None
65 68 self.manifest = None
66 69 self.files = None
67 70 self.dirs = None
68 71
69 72 checktool('mtn', abort=False)
70 73
71 74 def mtnrun(self, *args, **kwargs):
72 75 if self.automatestdio:
73 76 return self.mtnrunstdio(*args, **kwargs)
74 77 else:
75 78 return self.mtnrunsingle(*args, **kwargs)
76 79
77 80 def mtnrunsingle(self, *args, **kwargs):
78 81 kwargs['d'] = self.path
79 82 return self.run0('automate', *args, **kwargs)
80 83
81 84 def mtnrunstdio(self, *args, **kwargs):
82 85 # Prepare the command in automate stdio format
83 86 command = []
84 87 for k, v in kwargs.iteritems():
85 88 command.append("%s:%s" % (len(k), k))
86 89 if v:
87 90 command.append("%s:%s" % (len(v), v))
88 91 if command:
89 92 command.insert(0, 'o')
90 93 command.append('e')
91 94
92 95 command.append('l')
93 96 for arg in args:
94 97 command += "%s:%s" % (len(arg), arg)
95 98 command.append('e')
96 99 command = ''.join(command)
97 100
98 101 self.ui.debug("mtn: sending '%s'\n" % command)
99 102 self.mtnwritefp.write(command)
100 103 self.mtnwritefp.flush()
101 104
102 105 return self.mtnstdioreadcommandoutput(command)
103 106
104 107 def mtnstdioreadpacket(self):
105 108 read = None
106 109 commandnbr = ''
107 110 while read != ':':
108 111 read = self.mtnreadfp.read(1)
109 112 if not read:
110 113 raise util.Abort(_('bad mtn packet - no end of commandnbr'))
111 114 commandnbr += read
112 115 commandnbr = commandnbr[:-1]
113 116
114 117 stream = self.mtnreadfp.read(1)
115 118 if stream not in 'mewptl':
116 119 raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
117 120
118 121 read = self.mtnreadfp.read(1)
119 122 if read != ':':
120 123 raise util.Abort(_('bad mtn packet - no divider before size'))
121 124
122 125 read = None
123 126 lengthstr = ''
124 127 while read != ':':
125 128 read = self.mtnreadfp.read(1)
126 129 if not read:
127 130 raise util.Abort(_('bad mtn packet - no end of packet size'))
128 131 lengthstr += read
129 132 try:
130 133 length = long(lengthstr[:-1])
131 134 except TypeError:
132 135 raise util.Abort(_('bad mtn packet - bad packet size %s')
133 136 % lengthstr)
134 137
135 138 read = self.mtnreadfp.read(length)
136 139 if len(read) != length:
137 140 raise util.Abort(_("bad mtn packet - unable to read full packet "
138 141 "read %s of %s") % (len(read), length))
139 142
140 143 return (commandnbr, stream, length, read)
141 144
142 145 def mtnstdioreadcommandoutput(self, command):
143 146 retval = []
144 147 while True:
145 148 commandnbr, stream, length, output = self.mtnstdioreadpacket()
146 149 self.ui.debug('mtn: read packet %s:%s:%s\n' %
147 150 (commandnbr, stream, length))
148 151
149 152 if stream == 'l':
150 153 # End of command
151 154 if output != '0':
152 155 raise util.Abort(_("mtn command '%s' returned %s") %
153 156 (command, output))
154 157 break
155 158 elif stream in 'ew':
156 159 # Error, warning output
157 160 self.ui.warn(_('%s error:\n') % self.command)
158 161 self.ui.warn(output)
159 162 elif stream == 'p':
160 163 # Progress messages
161 164 self.ui.debug('mtn: ' + output)
162 165 elif stream == 'm':
163 166 # Main stream - command output
164 167 retval.append(output)
165 168
166 169 return ''.join(retval)
167 170
168 171 def mtnloadmanifest(self, rev):
169 172 if self.manifest_rev == rev:
170 173 return
171 174 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
172 175 self.manifest_rev = rev
173 176 self.files = {}
174 177 self.dirs = {}
175 178
176 179 for e in self.manifest:
177 180 m = self.file_re.match(e)
178 181 if m:
179 182 attr = ""
180 183 name = m.group(1)
181 184 node = m.group(2)
182 185 if self.attr_execute_re.match(e):
183 186 attr += "x"
184 187 self.files[name] = (node, attr)
185 188 m = self.dir_re.match(e)
186 189 if m:
187 190 self.dirs[m.group(1)] = True
188 191
189 192 def mtnisfile(self, name, rev):
190 193 # a non-file could be a directory or a deleted or renamed file
191 194 self.mtnloadmanifest(rev)
192 195 return name in self.files
193 196
194 197 def mtnisdir(self, name, rev):
195 198 self.mtnloadmanifest(rev)
196 199 return name in self.dirs
197 200
198 201 def mtngetcerts(self, rev):
199 202 certs = {"author":"<missing>", "date":"<missing>",
200 203 "changelog":"<missing>", "branch":"<missing>"}
201 204 certlist = self.mtnrun("certs", rev)
202 205 # mtn < 0.45:
203 206 # key "test@selenic.com"
204 207 # mtn >= 0.45:
205 208 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
206 209 certlist = re.split('\n\n key ["\[]', certlist)
207 210 for e in certlist:
208 211 m = self.cert_re.match(e)
209 212 if m:
210 213 name, value = m.groups()
211 214 value = value.replace(r'\"', '"')
212 215 value = value.replace(r'\\', '\\')
213 216 certs[name] = value
214 217 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
215 218 # and all times are stored in UTC
216 219 certs["date"] = certs["date"].split('.')[0] + " UTC"
217 220 return certs
218 221
219 222 # implement the converter_source interface:
220 223
221 224 def getheads(self):
222 if not self.rev:
225 if not self.revs:
223 226 return self.mtnrun("leaves").splitlines()
224 227 else:
225 return [self.rev]
228 return self.revs
226 229
227 230 def getchanges(self, rev, full):
228 231 if full:
229 232 raise util.Abort(_("convert from monotone does not support --full"))
230 233 revision = self.mtnrun("get_revision", rev).split("\n\n")
231 234 files = {}
232 235 ignoremove = {}
233 236 renameddirs = []
234 237 copies = {}
235 238 for e in revision:
236 239 m = self.add_file_re.match(e)
237 240 if m:
238 241 files[m.group(1)] = rev
239 242 ignoremove[m.group(1)] = rev
240 243 m = self.patch_re.match(e)
241 244 if m:
242 245 files[m.group(1)] = rev
243 246 # Delete/rename is handled later when the convert engine
244 247 # discovers an IOError exception from getfile,
245 248 # but only if we add the "from" file to the list of changes.
246 249 m = self.delete_re.match(e)
247 250 if m:
248 251 files[m.group(1)] = rev
249 252 m = self.rename_re.match(e)
250 253 if m:
251 254 toname = m.group(2)
252 255 fromname = m.group(1)
253 256 if self.mtnisfile(toname, rev):
254 257 ignoremove[toname] = 1
255 258 copies[toname] = fromname
256 259 files[toname] = rev
257 260 files[fromname] = rev
258 261 elif self.mtnisdir(toname, rev):
259 262 renameddirs.append((fromname, toname))
260 263
261 264 # Directory renames can be handled only once we have recorded
262 265 # all new files
263 266 for fromdir, todir in renameddirs:
264 267 renamed = {}
265 268 for tofile in self.files:
266 269 if tofile in ignoremove:
267 270 continue
268 271 if tofile.startswith(todir + '/'):
269 272 renamed[tofile] = fromdir + tofile[len(todir):]
270 273 # Avoid chained moves like:
271 274 # d1(/a) => d3/d1(/a)
272 275 # d2 => d3
273 276 ignoremove[tofile] = 1
274 277 for tofile, fromfile in renamed.items():
275 278 self.ui.debug (_("copying file in renamed directory "
276 279 "from '%s' to '%s'")
277 280 % (fromfile, tofile), '\n')
278 281 files[tofile] = rev
279 282 copies[tofile] = fromfile
280 283 for fromfile in renamed.values():
281 284 files[fromfile] = rev
282 285
283 286 return (files.items(), copies, set())
284 287
285 288 def getfile(self, name, rev):
286 289 if not self.mtnisfile(name, rev):
287 290 return None, None
288 291 try:
289 292 data = self.mtnrun("get_file_of", name, r=rev)
290 293 except Exception:
291 294 return None, None
292 295 self.mtnloadmanifest(rev)
293 296 node, attr = self.files.get(name, (None, ""))
294 297 return data, attr
295 298
296 299 def getcommit(self, rev):
297 300 extra = {}
298 301 certs = self.mtngetcerts(rev)
299 302 if certs.get('suspend') == certs["branch"]:
300 303 extra['close'] = 1
301 304 return commit(
302 305 author=certs["author"],
303 306 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
304 307 desc=certs["changelog"],
305 308 rev=rev,
306 309 parents=self.mtnrun("parents", rev).splitlines(),
307 310 branch=certs["branch"],
308 311 extra=extra)
309 312
310 313 def gettags(self):
311 314 tags = {}
312 315 for e in self.mtnrun("tags").split("\n\n"):
313 316 m = self.tag_re.match(e)
314 317 if m:
315 318 tags[m.group(1)] = m.group(2)
316 319 return tags
317 320
318 321 def getchangedfiles(self, rev, i):
319 322 # This function is only needed to support --filemap
320 323 # ... and we don't support that
321 324 raise NotImplementedError
322 325
323 326 def before(self):
324 327 # Check if we have a new enough version to use automate stdio
325 328 version = 0.0
326 329 try:
327 330 versionstr = self.mtnrunsingle("interface_version")
328 331 version = float(versionstr)
329 332 except Exception:
330 333 raise util.Abort(_("unable to determine mtn automate interface "
331 334 "version"))
332 335
333 336 if version >= 12.0:
334 337 self.automatestdio = True
335 338 self.ui.debug("mtn automate version %s - using automate stdio\n" %
336 339 version)
337 340
338 341 # launch the long-running automate stdio process
339 342 self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
340 343 '-d', self.path)
341 344 # read the headers
342 345 read = self.mtnreadfp.readline()
343 346 if read != 'format-version: 2\n':
344 347 raise util.Abort(_('mtn automate stdio header unexpected: %s')
345 348 % read)
346 349 while read != '\n':
347 350 read = self.mtnreadfp.readline()
348 351 if not read:
349 352 raise util.Abort(_("failed to reach end of mtn automate "
350 353 "stdio headers"))
351 354 else:
352 355 self.ui.debug("mtn automate version %s - not using automate stdio "
353 356 "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
354 357
355 358 def after(self):
356 359 if self.automatestdio:
357 360 self.mtnwritefp.close()
358 361 self.mtnwritefp = None
359 362 self.mtnreadfp.close()
360 363 self.mtnreadfp = None
361 364
@@ -1,207 +1,210 b''
1 1 # Perforce source for convert extension.
2 2 #
3 3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 4 #
5 5 # This software may be used and distributed according to the terms of the
6 6 # GNU General Public License version 2 or any later version.
7 7
8 8 from mercurial import util
9 9 from mercurial.i18n import _
10 10
11 11 from common import commit, converter_source, checktool, NoRepo
12 12 import marshal
13 13 import re
14 14
15 15 def loaditer(f):
16 16 "Yield the dictionary objects generated by p4"
17 17 try:
18 18 while True:
19 19 d = marshal.load(f)
20 20 if not d:
21 21 break
22 22 yield d
23 23 except EOFError:
24 24 pass
25 25
26 26 class p4_source(converter_source):
27 def __init__(self, ui, path, rev=None):
28 super(p4_source, self).__init__(ui, path, rev=rev)
27 def __init__(self, ui, path, revs=None):
28 super(p4_source, self).__init__(ui, path, revs=revs)
29 29
30 30 if "/" in path and not path.startswith('//'):
31 31 raise NoRepo(_('%s does not look like a P4 repository') % path)
32 32
33 33 checktool('p4', abort=False)
34 34
35 35 self.p4changes = {}
36 36 self.heads = {}
37 37 self.changeset = {}
38 38 self.files = {}
39 39 self.tags = {}
40 40 self.lastbranch = {}
41 41 self.parent = {}
42 42 self.encoding = "latin_1"
43 43 self.depotname = {} # mapping from local name to depot name
44 44 self.re_type = re.compile(
45 45 "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
46 46 "(\+\w+)?$")
47 47 self.re_keywords = re.compile(
48 48 r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
49 49 r":[^$\n]*\$")
50 50 self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
51 51
52 if revs and len(revs) > 1:
53 raise util.Abort(_("p4 source does not support specifying "
54 "multiple revisions"))
52 55 self._parse(ui, path)
53 56
54 57 def _parse_view(self, path):
55 58 "Read changes affecting the path"
56 59 cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
57 60 stdout = util.popen(cmd, mode='rb')
58 61 for d in loaditer(stdout):
59 62 c = d.get("change", None)
60 63 if c:
61 64 self.p4changes[c] = True
62 65
63 66 def _parse(self, ui, path):
64 67 "Prepare list of P4 filenames and revisions to import"
65 68 ui.status(_('reading p4 views\n'))
66 69
67 70 # read client spec or view
68 71 if "/" in path:
69 72 self._parse_view(path)
70 73 if path.startswith("//") and path.endswith("/..."):
71 74 views = {path[:-3]:""}
72 75 else:
73 76 views = {"//": ""}
74 77 else:
75 78 cmd = 'p4 -G client -o %s' % util.shellquote(path)
76 79 clientspec = marshal.load(util.popen(cmd, mode='rb'))
77 80
78 81 views = {}
79 82 for client in clientspec:
80 83 if client.startswith("View"):
81 84 sview, cview = clientspec[client].split()
82 85 self._parse_view(sview)
83 86 if sview.endswith("...") and cview.endswith("..."):
84 87 sview = sview[:-3]
85 88 cview = cview[:-3]
86 89 cview = cview[2:]
87 90 cview = cview[cview.find("/") + 1:]
88 91 views[sview] = cview
89 92
90 93 # list of changes that affect our source files
91 94 self.p4changes = self.p4changes.keys()
92 95 self.p4changes.sort(key=int)
93 96
94 97 # list with depot pathnames, longest first
95 98 vieworder = views.keys()
96 99 vieworder.sort(key=len, reverse=True)
97 100
98 101 # handle revision limiting
99 102 startrev = self.ui.config('convert', 'p4.startrev', default=0)
100 103 self.p4changes = [x for x in self.p4changes
101 104 if ((not startrev or int(x) >= int(startrev)) and
102 (not self.rev or int(x) <= int(self.rev)))]
105 (not self.revs or int(x) <= int(self.revs[0])))]
103 106
104 107 # now read the full changelists to get the list of file revisions
105 108 ui.status(_('collecting p4 changelists\n'))
106 109 lastid = None
107 110 for change in self.p4changes:
108 111 cmd = "p4 -G describe -s %s" % change
109 112 stdout = util.popen(cmd, mode='rb')
110 113 d = marshal.load(stdout)
111 114 desc = self.recode(d.get("desc", ""))
112 115 shortdesc = desc.split("\n", 1)[0]
113 116 t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
114 117 ui.status(util.ellipsis(t, 80) + '\n')
115 118
116 119 if lastid:
117 120 parents = [lastid]
118 121 else:
119 122 parents = []
120 123
121 124 date = (int(d["time"]), 0) # timezone not set
122 125 c = commit(author=self.recode(d["user"]),
123 126 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
124 127 parents=parents, desc=desc, branch='',
125 128 extra={"p4": change})
126 129
127 130 files = []
128 131 i = 0
129 132 while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
130 133 oldname = d["depotFile%d" % i]
131 134 filename = None
132 135 for v in vieworder:
133 136 if oldname.startswith(v):
134 137 filename = views[v] + oldname[len(v):]
135 138 break
136 139 if filename:
137 140 files.append((filename, d["rev%d" % i]))
138 141 self.depotname[filename] = oldname
139 142 i += 1
140 143 self.changeset[change] = c
141 144 self.files[change] = files
142 145 lastid = change
143 146
144 147 if lastid:
145 148 self.heads = [lastid]
146 149
147 150 def getheads(self):
148 151 return self.heads
149 152
150 153 def getfile(self, name, rev):
151 154 cmd = 'p4 -G print %s' \
152 155 % util.shellquote("%s#%s" % (self.depotname[name], rev))
153 156 stdout = util.popen(cmd, mode='rb')
154 157
155 158 mode = None
156 159 contents = ""
157 160 keywords = None
158 161
159 162 for d in loaditer(stdout):
160 163 code = d["code"]
161 164 data = d.get("data")
162 165
163 166 if code == "error":
164 167 raise IOError(d["generic"], data)
165 168
166 169 elif code == "stat":
167 170 if d.get("action") == "purge":
168 171 return None, None
169 172 p4type = self.re_type.match(d["type"])
170 173 if p4type:
171 174 mode = ""
172 175 flags = (p4type.group(1) or "") + (p4type.group(3) or "")
173 176 if "x" in flags:
174 177 mode = "x"
175 178 if p4type.group(2) == "symlink":
176 179 mode = "l"
177 180 if "ko" in flags:
178 181 keywords = self.re_keywords_old
179 182 elif "k" in flags:
180 183 keywords = self.re_keywords
181 184
182 185 elif code == "text" or code == "binary":
183 186 contents += data
184 187
185 188 if mode is None:
186 189 return None, None
187 190
188 191 if keywords:
189 192 contents = keywords.sub("$\\1$", contents)
190 193 if mode == "l" and contents.endswith("\n"):
191 194 contents = contents[:-1]
192 195
193 196 return contents, mode
194 197
195 198 def getchanges(self, rev, full):
196 199 if full:
197 200 raise util.Abort(_("convert from p4 does not support --full"))
198 201 return self.files[rev], {}, set()
199 202
200 203 def getcommit(self, rev):
201 204 return self.changeset[rev]
202 205
203 206 def gettags(self):
204 207 return self.tags
205 208
206 209 def getchangedfiles(self, rev, i):
207 210 return sorted([x[0] for x in self.files[rev]])
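Editorial note: in the p4 source, the single allowed value revs[0] becomes the inclusive upper bound on numeric changelists, combined with the convert.p4.startrev lower bound. A hedged standalone sketch of that windowing (function name and sample data are illustrative, not part of the extension):

# Hedged sketch of the p4 changelist window after this change.
def filterchanges(changes, startrev=0, revs=None):
    # Keep numeric changelists >= startrev (if set) and <= revs[0]
    # (if a revision was given), mirroring the list comprehension in
    # p4_source._parse.
    upper = int(revs[0]) if revs else None
    return [c for c in changes
            if (not startrev or int(c) >= int(startrev)) and
               (upper is None or int(c) <= upper)]

if __name__ == '__main__':
    print(filterchanges(['100', '101', '102', '103'],
                        startrev=101, revs=['102']))   # -> ['101', '102']
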
@@ -1,1330 +1,1334 b''
1 1 # Subversion 1.4/1.5 Python API backend
2 2 #
3 3 # Copyright(C) 2007 Daniel Holth et al
4 4
5 5 import os, re, sys, tempfile, urllib, urllib2
6 6 import xml.dom.minidom
7 7 import cPickle as pickle
8 8
9 9 from mercurial import strutil, scmutil, util, encoding
10 10 from mercurial.i18n import _
11 11
12 12 propertycache = util.propertycache
13 13
14 14 # Subversion stuff. Works best with very recent Python SVN bindings
15 15 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
16 16 # these bindings.
17 17
18 18 from cStringIO import StringIO
19 19
20 20 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
21 21 from common import commandline, converter_source, converter_sink, mapfile
22 22 from common import makedatetimestamp
23 23
24 24 try:
25 25 from svn.core import SubversionException, Pool
26 26 import svn
27 27 import svn.client
28 28 import svn.core
29 29 import svn.ra
30 30 import svn.delta
31 31 import transport
32 32 import warnings
33 33 warnings.filterwarnings('ignore',
34 34 module='svn.core',
35 35 category=DeprecationWarning)
36 36
37 37 except ImportError:
38 38 svn = None
39 39
40 40 class SvnPathNotFound(Exception):
41 41 pass
42 42
43 43 def revsplit(rev):
44 44 """Parse a revision string and return (uuid, path, revnum).
45 45 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
46 46 ... '/proj%20B/mytrunk/mytrunk@1')
47 47 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
48 48 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
49 49 ('', '', 1)
50 50 >>> revsplit('@7')
51 51 ('', '', 7)
52 52 >>> revsplit('7')
53 53 ('', '', 0)
54 54 >>> revsplit('bad')
55 55 ('', '', 0)
56 56 """
57 57 parts = rev.rsplit('@', 1)
58 58 revnum = 0
59 59 if len(parts) > 1:
60 60 revnum = int(parts[1])
61 61 parts = parts[0].split('/', 1)
62 62 uuid = ''
63 63 mod = ''
64 64 if len(parts) > 1 and parts[0].startswith('svn:'):
65 65 uuid = parts[0][4:]
66 66 mod = '/' + parts[1]
67 67 return uuid, mod, revnum
68 68
69 69 def quote(s):
70 70 # As of svn 1.7, many svn calls expect "canonical" paths. In
71 71 # theory, we should call svn.core.*canonicalize() on all paths
72 72 # before passing them to the API. Instead, we assume the base url
73 73 # is canonical and copy the behaviour of svn URL encoding function
74 74 # so we can extend it safely with new components. The "safe"
75 75 # characters were taken from the "svn_uri__char_validity" table in
76 76 # libsvn_subr/path.c.
77 77 return urllib.quote(s, "!$&'()*+,-./:=@_~")
78 78
79 79 def geturl(path):
80 80 try:
81 81 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
82 82 except SubversionException:
83 83 # svn.client.url_from_path() fails with local repositories
84 84 pass
85 85 if os.path.isdir(path):
86 86 path = os.path.normpath(os.path.abspath(path))
87 87 if os.name == 'nt':
88 88 path = '/' + util.normpath(path)
89 89 # Module URL is later compared with the repository URL returned
90 90 # by svn API, which is UTF-8.
91 91 path = encoding.tolocal(path)
92 92 path = 'file://%s' % quote(path)
93 93 return svn.core.svn_path_canonicalize(path)
94 94
95 95 def optrev(number):
96 96 optrev = svn.core.svn_opt_revision_t()
97 97 optrev.kind = svn.core.svn_opt_revision_number
98 98 optrev.value.number = number
99 99 return optrev
100 100
101 101 class changedpath(object):
102 102 def __init__(self, p):
103 103 self.copyfrom_path = p.copyfrom_path
104 104 self.copyfrom_rev = p.copyfrom_rev
105 105 self.action = p.action
106 106
107 107 def get_log_child(fp, url, paths, start, end, limit=0,
108 108 discover_changed_paths=True, strict_node_history=False):
109 109 protocol = -1
110 110 def receiver(orig_paths, revnum, author, date, message, pool):
111 111 paths = {}
112 112 if orig_paths is not None:
113 113 for k, v in orig_paths.iteritems():
114 114 paths[k] = changedpath(v)
115 115 pickle.dump((paths, revnum, author, date, message),
116 116 fp, protocol)
117 117
118 118 try:
119 119 # Use an ra of our own so that our parent can consume
120 120 # our results without confusing the server.
121 121 t = transport.SvnRaTransport(url=url)
122 122 svn.ra.get_log(t.ra, paths, start, end, limit,
123 123 discover_changed_paths,
124 124 strict_node_history,
125 125 receiver)
126 126 except IOError:
127 127 # Caller may interrupt the iteration
128 128 pickle.dump(None, fp, protocol)
129 129 except Exception as inst:
130 130 pickle.dump(str(inst), fp, protocol)
131 131 else:
132 132 pickle.dump(None, fp, protocol)
133 133 fp.close()
134 134 # With large history, cleanup process goes crazy and suddenly
135 135 # consumes *huge* amount of memory. The output file being closed,
136 136 # there is no need for clean termination.
137 137 os._exit(0)
138 138
139 139 def debugsvnlog(ui, **opts):
140 140 """Fetch SVN log in a subprocess and channel them back to parent to
141 141 avoid memory collection issues.
142 142 """
143 143 if svn is None:
144 144 raise util.Abort(_('debugsvnlog could not load Subversion python '
145 145 'bindings'))
146 146
147 147 util.setbinary(sys.stdin)
148 148 util.setbinary(sys.stdout)
149 149 args = decodeargs(sys.stdin.read())
150 150 get_log_child(sys.stdout, *args)
151 151
152 152 class logstream(object):
153 153 """Interruptible revision log iterator."""
154 154 def __init__(self, stdout):
155 155 self._stdout = stdout
156 156
157 157 def __iter__(self):
158 158 while True:
159 159 try:
160 160 entry = pickle.load(self._stdout)
161 161 except EOFError:
162 162 raise util.Abort(_('Mercurial failed to run itself, check'
163 163 ' hg executable is in PATH'))
164 164 try:
165 165 orig_paths, revnum, author, date, message = entry
166 166 except (TypeError, ValueError):
167 167 if entry is None:
168 168 break
169 169 raise util.Abort(_("log stream exception '%s'") % entry)
170 170 yield entry
171 171
172 172 def close(self):
173 173 if self._stdout:
174 174 self._stdout.close()
175 175 self._stdout = None
176 176
177 177 class directlogstream(list):
178 178 """Direct revision log iterator.
179 179 This can be used for debugging and development but it will probably leak
180 180 memory and is not suitable for real conversions."""
181 181 def __init__(self, url, paths, start, end, limit=0,
182 182 discover_changed_paths=True, strict_node_history=False):
183 183
184 184 def receiver(orig_paths, revnum, author, date, message, pool):
185 185 paths = {}
186 186 if orig_paths is not None:
187 187 for k, v in orig_paths.iteritems():
188 188 paths[k] = changedpath(v)
189 189 self.append((paths, revnum, author, date, message))
190 190
191 191 # Use an ra of our own so that our parent can consume
192 192 # our results without confusing the server.
193 193 t = transport.SvnRaTransport(url=url)
194 194 svn.ra.get_log(t.ra, paths, start, end, limit,
195 195 discover_changed_paths,
196 196 strict_node_history,
197 197 receiver)
198 198
199 199 def close(self):
200 200 pass
201 201
202 202 # Check to see if the given path is a local Subversion repo. Verify this by
203 203 # looking for several svn-specific files and directories in the given
204 204 # directory.
205 205 def filecheck(ui, path, proto):
206 206 for x in ('locks', 'hooks', 'format', 'db'):
207 207 if not os.path.exists(os.path.join(path, x)):
208 208 return False
209 209 return True
210 210
211 211 # Check to see if a given path is the root of an svn repo over http. We verify
212 212 # this by requesting a version-controlled URL we know can't exist and looking
213 213 # for the svn-specific "not found" XML.
214 214 def httpcheck(ui, path, proto):
215 215 try:
216 216 opener = urllib2.build_opener()
217 217 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
218 218 data = rsp.read()
219 219 except urllib2.HTTPError as inst:
220 220 if inst.code != 404:
221 221 # Except for 404 we cannot know for sure this is not an svn repo
222 222 ui.warn(_('svn: cannot probe remote repository, assume it could '
223 223 'be a subversion repository. Use --source-type if you '
224 224 'know better.\n'))
225 225 return True
226 226 data = inst.fp.read()
227 227 except Exception:
228 228 # Could be urllib2.URLError if the URL is invalid or anything else.
229 229 return False
230 230 return '<m:human-readable errcode="160013">' in data
231 231
232 232 protomap = {'http': httpcheck,
233 233 'https': httpcheck,
234 234 'file': filecheck,
235 235 }
236 236 def issvnurl(ui, url):
237 237 try:
238 238 proto, path = url.split('://', 1)
239 239 if proto == 'file':
240 240 if (os.name == 'nt' and path[:1] == '/' and path[1:2].isalpha()
241 241 and path[2:6].lower() == '%3a/'):
242 242 path = path[:2] + ':/' + path[6:]
243 243 path = urllib.url2pathname(path)
244 244 except ValueError:
245 245 proto = 'file'
246 246 path = os.path.abspath(url)
247 247 if proto == 'file':
248 248 path = util.pconvert(path)
249 249 check = protomap.get(proto, lambda *args: False)
250 250 while '/' in path:
251 251 if check(ui, path, proto):
252 252 return True
253 253 path = path.rsplit('/', 1)[0]
254 254 return False
255 255
256 256 # SVN conversion code stolen from bzr-svn and tailor
257 257 #
258 258 # Subversion looks like a versioned filesystem, branches structures
259 259 # are defined by conventions and not enforced by the tool. First,
260 260 # we define the potential branches (modules) as "trunk" and "branches"
261 261 # children directories. Revisions are then identified by their
262 262 # module and revision number (and a repository identifier).
263 263 #
264 264 # The revision graph is really a tree (or a forest). By default, a
265 265 # revision parent is the previous revision in the same module. If the
266 266 # module directory is copied/moved from another module then the
267 267 # revision is the module root and its parent the source revision in
268 268 # the parent module. A revision has at most one parent.
269 269 #
270 270 class svn_source(converter_source):
271 def __init__(self, ui, url, rev=None):
272 super(svn_source, self).__init__(ui, url, rev=rev)
271 def __init__(self, ui, url, revs=None):
272 super(svn_source, self).__init__(ui, url, revs=revs)
273 273
274 274 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
275 275 (os.path.exists(url) and
276 276 os.path.exists(os.path.join(url, '.svn'))) or
277 277 issvnurl(ui, url)):
278 278 raise NoRepo(_("%s does not look like a Subversion repository")
279 279 % url)
280 280 if svn is None:
281 281 raise MissingTool(_('could not load Subversion python bindings'))
282 282
283 283 try:
284 284 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
285 285 if version < (1, 4):
286 286 raise MissingTool(_('Subversion python bindings %d.%d found, '
287 287 '1.4 or later required') % version)
288 288 except AttributeError:
289 289 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
290 290 'or later required'))
291 291
292 292 self.lastrevs = {}
293 293
294 294 latest = None
295 295 try:
296 296 # Support file://path@rev syntax. Useful e.g. to convert
297 297 # deleted branches.
298 298 at = url.rfind('@')
299 299 if at >= 0:
300 300 latest = int(url[at + 1:])
301 301 url = url[:at]
302 302 except ValueError:
303 303 pass
304 304 self.url = geturl(url)
305 305 self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
306 306 try:
307 307 self.transport = transport.SvnRaTransport(url=self.url)
308 308 self.ra = self.transport.ra
309 309 self.ctx = self.transport.client
310 310 self.baseurl = svn.ra.get_repos_root(self.ra)
311 311 # Module is either empty or a repository path starting with
312 312 # a slash and not ending with a slash.
313 313 self.module = urllib.unquote(self.url[len(self.baseurl):])
314 314 self.prevmodule = None
315 315 self.rootmodule = self.module
316 316 self.commits = {}
317 317 self.paths = {}
318 318 self.uuid = svn.ra.get_uuid(self.ra)
319 319 except SubversionException:
320 320 ui.traceback()
321 321 svnversion = '%d.%d.%d' % (svn.core.SVN_VER_MAJOR,
322 322 svn.core.SVN_VER_MINOR,
323 323 svn.core.SVN_VER_MICRO)
324 324 raise NoRepo(_("%s does not look like a Subversion repository "
325 325 "to libsvn version %s")
326 326 % (self.url, svnversion))
327 327
328 if rev:
328 if revs:
329 if len(revs) > 1:
330 raise util.Abort(_('subversion source does not support '
331 'specifying multiple revisions'))
329 332 try:
330 latest = int(rev)
333 latest = int(revs[0])
331 334 except ValueError:
332 raise util.Abort(_('svn: revision %s is not an integer') % rev)
335 raise util.Abort(_('svn: revision %s is not an integer') %
336 revs[0])
333 337
334 338 self.trunkname = self.ui.config('convert', 'svn.trunk',
335 339 'trunk').strip('/')
336 340 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
337 341 try:
338 342 self.startrev = int(self.startrev)
339 343 if self.startrev < 0:
340 344 self.startrev = 0
341 345 except ValueError:
342 346 raise util.Abort(_('svn: start revision %s is not an integer')
343 347 % self.startrev)
344 348
345 349 try:
346 350 self.head = self.latest(self.module, latest)
347 351 except SvnPathNotFound:
348 352 self.head = None
349 353 if not self.head:
350 354 raise util.Abort(_('no revision found in module %s')
351 355 % self.module)
352 356 self.last_changed = self.revnum(self.head)
353 357
354 358 self._changescache = (None, None)
355 359
356 360 if os.path.exists(os.path.join(url, '.svn/entries')):
357 361 self.wc = url
358 362 else:
359 363 self.wc = None
360 364 self.convertfp = None
361 365
362 366 def setrevmap(self, revmap):
363 367 lastrevs = {}
364 368 for revid in revmap.iterkeys():
365 369 uuid, module, revnum = revsplit(revid)
366 370 lastrevnum = lastrevs.setdefault(module, revnum)
367 371 if revnum > lastrevnum:
368 372 lastrevs[module] = revnum
369 373 self.lastrevs = lastrevs
370 374
371 375 def exists(self, path, optrev):
372 376 try:
373 377 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
374 378 optrev, False, self.ctx)
375 379 return True
376 380 except SubversionException:
377 381 return False
378 382
379 383 def getheads(self):
380 384
381 385 def isdir(path, revnum):
382 386 kind = self._checkpath(path, revnum)
383 387 return kind == svn.core.svn_node_dir
384 388
385 389 def getcfgpath(name, rev):
386 390 cfgpath = self.ui.config('convert', 'svn.' + name)
387 391 if cfgpath is not None and cfgpath.strip() == '':
388 392 return None
389 393 path = (cfgpath or name).strip('/')
390 394 if not self.exists(path, rev):
391 395 if self.module.endswith(path) and name == 'trunk':
392 396 # we are converting from inside this directory
393 397 return None
394 398 if cfgpath:
395 399 raise util.Abort(_('expected %s to be at %r, but not found')
396 400 % (name, path))
397 401 return None
398 402 self.ui.note(_('found %s at %r\n') % (name, path))
399 403 return path
400 404
401 405 rev = optrev(self.last_changed)
402 406 oldmodule = ''
403 407 trunk = getcfgpath('trunk', rev)
404 408 self.tags = getcfgpath('tags', rev)
405 409 branches = getcfgpath('branches', rev)
406 410
407 411 # If the project has a trunk or branches, we will extract heads
408 412 # from them. We keep the project root otherwise.
409 413 if trunk:
410 414 oldmodule = self.module or ''
411 415 self.module += '/' + trunk
412 416 self.head = self.latest(self.module, self.last_changed)
413 417 if not self.head:
414 418 raise util.Abort(_('no revision found in module %s')
415 419 % self.module)
416 420
417 421 # First head in the list is the module's head
418 422 self.heads = [self.head]
419 423 if self.tags is not None:
420 424 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
421 425
422 426 # Check if branches bring a few more heads to the list
423 427 if branches:
424 428 rpath = self.url.strip('/')
425 429 branchnames = svn.client.ls(rpath + '/' + quote(branches),
426 430 rev, False, self.ctx)
427 431 for branch in sorted(branchnames):
428 432 module = '%s/%s/%s' % (oldmodule, branches, branch)
429 433 if not isdir(module, self.last_changed):
430 434 continue
431 435 brevid = self.latest(module, self.last_changed)
432 436 if not brevid:
433 437 self.ui.note(_('ignoring empty branch %s\n') % branch)
434 438 continue
435 439 self.ui.note(_('found branch %s at %d\n') %
436 440 (branch, self.revnum(brevid)))
437 441 self.heads.append(brevid)
438 442
439 443 if self.startrev and self.heads:
440 444 if len(self.heads) > 1:
441 445 raise util.Abort(_('svn: start revision is not supported '
442 446 'with more than one branch'))
443 447 revnum = self.revnum(self.heads[0])
444 448 if revnum < self.startrev:
445 449 raise util.Abort(
446 450 _('svn: no revision found after start revision %d')
447 451 % self.startrev)
448 452
449 453 return self.heads
450 454
451 455 def _getchanges(self, rev, full):
452 456 (paths, parents) = self.paths[rev]
453 457 copies = {}
454 458 if parents:
455 459 files, self.removed, copies = self.expandpaths(rev, paths, parents)
456 460 if full or not parents:
457 461 # Perform a full checkout on roots
458 462 uuid, module, revnum = revsplit(rev)
459 463 entries = svn.client.ls(self.baseurl + quote(module),
460 464 optrev(revnum), True, self.ctx)
461 465 files = [n for n, e in entries.iteritems()
462 466 if e.kind == svn.core.svn_node_file]
463 467 self.removed = set()
464 468
465 469 files.sort()
466 470 files = zip(files, [rev] * len(files))
467 471 return (files, copies)
468 472
469 473 def getchanges(self, rev, full):
470 474 # reuse cache from getchangedfiles
471 475 if self._changescache[0] == rev and not full:
472 476 (files, copies) = self._changescache[1]
473 477 else:
474 478 (files, copies) = self._getchanges(rev, full)
475 479 # caller caches the result, so free it here to release memory
476 480 del self.paths[rev]
477 481 return (files, copies, set())
478 482
479 483 def getchangedfiles(self, rev, i):
480 484 # called from filemap - cache computed values for reuse in getchanges
481 485 (files, copies) = self._getchanges(rev, False)
482 486 self._changescache = (rev, (files, copies))
483 487 return [f[0] for f in files]
484 488
485 489 def getcommit(self, rev):
486 490 if rev not in self.commits:
487 491 uuid, module, revnum = revsplit(rev)
488 492 self.module = module
489 493 self.reparent(module)
490 494 # We assume that:
491 495 # - requests for revisions after "stop" come from the
492 496 # revision graph backward traversal. Cache all of them
493 497 # down to stop, they will be used eventually.
494 498 # - requests for revisions before "stop" come to get
495 499 # isolated branches parents. Just fetch what is needed.
496 500 stop = self.lastrevs.get(module, 0)
497 501 if revnum < stop:
498 502 stop = revnum + 1
499 503 self._fetch_revisions(revnum, stop)
500 504 if rev not in self.commits:
501 505 raise util.Abort(_('svn: revision %s not found') % revnum)
502 506 revcommit = self.commits[rev]
503 507 # caller caches the result, so free it here to release memory
504 508 del self.commits[rev]
505 509 return revcommit
506 510
507 511 def checkrevformat(self, revstr, mapname='splicemap'):
508 512 """ fails if revision format does not match the correct format"""
509 513 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
510 514 '[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
511 515 '{12,12}(.*)\@[0-9]+$',revstr):
512 516 raise util.Abort(_('%s entry %s is not a valid revision'
513 517 ' identifier') % (mapname, revstr))
514 518
515 519 def numcommits(self):
516 520 return int(self.head.rsplit('@', 1)[1]) - self.startrev
517 521
518 522 def gettags(self):
519 523 tags = {}
520 524 if self.tags is None:
521 525 return tags
522 526
523 527 # svn tags are just a convention, project branches left in a
524 528 # 'tags' directory. There is no other relationship than
525 529 # ancestry, which is expensive to discover and makes them hard
526 530 # to update incrementally. Worse, past revisions may be
527 531 # referenced by tags far away in the future, requiring a deep
528 532 # history traversal on every calculation. Current code
529 533 # performs a single backward traversal, tracking moves within
530 534 # the tags directory (tag renaming) and recording a new tag
531 535 # everytime a project is copied from outside the tags
532 536 # directory. It also lists deleted tags, this behaviour may
533 537 # change in the future.
534 538 pendings = []
535 539 tagspath = self.tags
536 540 start = svn.ra.get_latest_revnum(self.ra)
537 541 stream = self._getlog([self.tags], start, self.startrev)
538 542 try:
539 543 for entry in stream:
540 544 origpaths, revnum, author, date, message = entry
541 545 if not origpaths:
542 546 origpaths = []
543 547 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
544 548 in origpaths.iteritems() if e.copyfrom_path]
545 549 # Apply moves/copies from more specific to general
546 550 copies.sort(reverse=True)
547 551
548 552 srctagspath = tagspath
549 553 if copies and copies[-1][2] == tagspath:
550 554 # Track tags directory moves
551 555 srctagspath = copies.pop()[0]
552 556
553 557 for source, sourcerev, dest in copies:
554 558 if not dest.startswith(tagspath + '/'):
555 559 continue
556 560 for tag in pendings:
557 561 if tag[0].startswith(dest):
558 562 tagpath = source + tag[0][len(dest):]
559 563 tag[:2] = [tagpath, sourcerev]
560 564 break
561 565 else:
562 566 pendings.append([source, sourcerev, dest])
563 567
564 568 # Filter out tags with children coming from different
565 569 # parts of the repository like:
566 570 # /tags/tag.1 (from /trunk:10)
567 571 # /tags/tag.1/foo (from /branches/foo:12)
568 572 # Here/tags/tag.1 discarded as well as its children.
569 573 # It happens with tools like cvs2svn. Such tags cannot
570 574 # be represented in mercurial.
571 575 addeds = dict((p, e.copyfrom_path) for p, e
572 576 in origpaths.iteritems()
573 577 if e.action == 'A' and e.copyfrom_path)
574 578 badroots = set()
575 579 for destroot in addeds:
576 580 for source, sourcerev, dest in pendings:
577 581 if (not dest.startswith(destroot + '/')
578 582 or source.startswith(addeds[destroot] + '/')):
579 583 continue
580 584 badroots.add(destroot)
581 585 break
582 586
583 587 for badroot in badroots:
584 588 pendings = [p for p in pendings if p[2] != badroot
585 589 and not p[2].startswith(badroot + '/')]
586 590
587 591 # Tell tag renamings from tag creations
588 592 renamings = []
589 593 for source, sourcerev, dest in pendings:
590 594 tagname = dest.split('/')[-1]
591 595 if source.startswith(srctagspath):
592 596 renamings.append([source, sourcerev, tagname])
593 597 continue
594 598 if tagname in tags:
595 599 # Keep the latest tag value
596 600 continue
597 601 # From revision may be fake, get one with changes
598 602 try:
599 603 tagid = self.latest(source, sourcerev)
600 604 if tagid and tagname not in tags:
601 605 tags[tagname] = tagid
602 606 except SvnPathNotFound:
603 607 # It happens when we are following directories
604 608 # we assumed were copied with their parents
605 609 # but were really created in the tag
606 610 # directory.
607 611 pass
608 612 pendings = renamings
609 613 tagspath = srctagspath
610 614 finally:
611 615 stream.close()
612 616 return tags
613 617
614 618 def converted(self, rev, destrev):
615 619 if not self.wc:
616 620 return
617 621 if self.convertfp is None:
618 622 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
619 623 'a')
620 624 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
621 625 self.convertfp.flush()
622 626
623 627 def revid(self, revnum, module=None):
624 628 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
625 629
626 630 def revnum(self, rev):
627 631 return int(rev.split('@')[-1])
628 632
629 633 def latest(self, path, stop=None):
630 634 """Find the latest revid affecting path, up to stop revision
631 635 number. If stop is None, default to repository latest
632 636 revision. It may return a revision in a different module,
633 637 since a branch may be moved without a change being
634 638 reported. Return None if computed module does not belong to
635 639 rootmodule subtree.
636 640 """
637 641 def findchanges(path, start, stop=None):
638 642 stream = self._getlog([path], start, stop or 1)
639 643 try:
640 644 for entry in stream:
641 645 paths, revnum, author, date, message = entry
642 646 if stop is None and paths:
643 647 # We do not know the latest changed revision,
644 648 # keep the first one with changed paths.
645 649 break
646 650 if revnum <= stop:
647 651 break
648 652
649 653 for p in paths:
650 654 if (not path.startswith(p) or
651 655 not paths[p].copyfrom_path):
652 656 continue
653 657 newpath = paths[p].copyfrom_path + path[len(p):]
654 658 self.ui.debug("branch renamed from %s to %s at %d\n" %
655 659 (path, newpath, revnum))
656 660 path = newpath
657 661 break
658 662 if not paths:
659 663 revnum = None
660 664 return revnum, path
661 665 finally:
662 666 stream.close()
663 667
664 668 if not path.startswith(self.rootmodule):
665 669 # Requests on foreign branches may be forbidden at server level
666 670 self.ui.debug('ignoring foreign branch %r\n' % path)
667 671 return None
668 672
669 673 if stop is None:
670 674 stop = svn.ra.get_latest_revnum(self.ra)
671 675 try:
672 676 prevmodule = self.reparent('')
673 677 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
674 678 self.reparent(prevmodule)
675 679 except SubversionException:
676 680 dirent = None
677 681 if not dirent:
678 682 raise SvnPathNotFound(_('%s not found up to revision %d')
679 683 % (path, stop))
680 684
681 685 # stat() gives us the previous revision on this line of
682 686 # development, but it might be in *another module*. Fetch the
683 687 # log and detect renames down to the latest revision.
684 688 revnum, realpath = findchanges(path, stop, dirent.created_rev)
685 689 if revnum is None:
686 690 # Tools like svnsync can create empty revision, when
687 691 # synchronizing only a subtree for instance. These empty
688 692 # revisions created_rev still have their original values
689 693 # despite all changes having disappeared and can be
690 694 # returned by ra.stat(), at least when stating the root
691 695 # module. In that case, do not trust created_rev and scan
692 696 # the whole history.
693 697 revnum, realpath = findchanges(path, stop)
694 698 if revnum is None:
695 699 self.ui.debug('ignoring empty branch %r\n' % realpath)
696 700 return None
697 701
698 702 if not realpath.startswith(self.rootmodule):
699 703 self.ui.debug('ignoring foreign branch %r\n' % realpath)
700 704 return None
701 705 return self.revid(revnum, realpath)
702 706
703 707 def reparent(self, module):
704 708 """Reparent the svn transport and return the previous parent."""
705 709 if self.prevmodule == module:
706 710 return module
707 711 svnurl = self.baseurl + quote(module)
708 712 prevmodule = self.prevmodule
709 713 if prevmodule is None:
710 714 prevmodule = ''
711 715 self.ui.debug("reparent to %s\n" % svnurl)
712 716 svn.ra.reparent(self.ra, svnurl)
713 717 self.prevmodule = module
714 718 return prevmodule
715 719
716 720 def expandpaths(self, rev, paths, parents):
717 721 changed, removed = set(), set()
718 722 copies = {}
719 723
720 724 new_module, revnum = revsplit(rev)[1:]
721 725 if new_module != self.module:
722 726 self.module = new_module
723 727 self.reparent(self.module)
724 728
725 729 for i, (path, ent) in enumerate(paths):
726 730 self.ui.progress(_('scanning paths'), i, item=path,
727 731 total=len(paths))
728 732 entrypath = self.getrelpath(path)
729 733
730 734 kind = self._checkpath(entrypath, revnum)
731 735 if kind == svn.core.svn_node_file:
732 736 changed.add(self.recode(entrypath))
733 737 if not ent.copyfrom_path or not parents:
734 738 continue
735 739 # Copy sources not in parent revisions cannot be
736 740 # represented, ignore their origin for now
737 741 pmodule, prevnum = revsplit(parents[0])[1:]
738 742 if ent.copyfrom_rev < prevnum:
739 743 continue
740 744 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
741 745 if not copyfrom_path:
742 746 continue
743 747 self.ui.debug("copied to %s from %s@%s\n" %
744 748 (entrypath, copyfrom_path, ent.copyfrom_rev))
745 749 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
746 750 elif kind == 0: # gone, but had better be a deleted *file*
747 751 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
748 752 pmodule, prevnum = revsplit(parents[0])[1:]
749 753 parentpath = pmodule + "/" + entrypath
750 754 fromkind = self._checkpath(entrypath, prevnum, pmodule)
751 755
752 756 if fromkind == svn.core.svn_node_file:
753 757 removed.add(self.recode(entrypath))
754 758 elif fromkind == svn.core.svn_node_dir:
755 759 oroot = parentpath.strip('/')
756 760 nroot = path.strip('/')
757 761 children = self._iterfiles(oroot, prevnum)
758 762 for childpath in children:
759 763 childpath = childpath.replace(oroot, nroot)
760 764 childpath = self.getrelpath("/" + childpath, pmodule)
761 765 if childpath:
762 766 removed.add(self.recode(childpath))
763 767 else:
764 768 self.ui.debug('unknown path in revision %d: %s\n' % \
765 769 (revnum, path))
766 770 elif kind == svn.core.svn_node_dir:
767 771 if ent.action == 'M':
768 772 # If the directory just had a prop change,
769 773 # then we shouldn't need to look for its children.
770 774 continue
771 775 if ent.action == 'R' and parents:
772 776 # If a directory is replacing a file, mark the previous
773 777 # file as deleted
774 778 pmodule, prevnum = revsplit(parents[0])[1:]
775 779 pkind = self._checkpath(entrypath, prevnum, pmodule)
776 780 if pkind == svn.core.svn_node_file:
777 781 removed.add(self.recode(entrypath))
778 782 elif pkind == svn.core.svn_node_dir:
779 783 # We do not know what files were kept or removed,
780 784 # mark them all as changed.
781 785 for childpath in self._iterfiles(pmodule, prevnum):
782 786 childpath = self.getrelpath("/" + childpath)
783 787 if childpath:
784 788 changed.add(self.recode(childpath))
785 789
786 790 for childpath in self._iterfiles(path, revnum):
787 791 childpath = self.getrelpath("/" + childpath)
788 792 if childpath:
789 793 changed.add(self.recode(childpath))
790 794
791 795 # Handle directory copies
792 796 if not ent.copyfrom_path or not parents:
793 797 continue
794 798 # Copy sources not in parent revisions cannot be
795 799 # represented, ignore their origin for now
796 800 pmodule, prevnum = revsplit(parents[0])[1:]
797 801 if ent.copyfrom_rev < prevnum:
798 802 continue
799 803 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
800 804 if not copyfrompath:
801 805 continue
802 806 self.ui.debug("mark %s came from %s:%d\n"
803 807 % (path, copyfrompath, ent.copyfrom_rev))
804 808 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
805 809 for childpath in children:
806 810 childpath = self.getrelpath("/" + childpath, pmodule)
807 811 if not childpath:
808 812 continue
809 813 copytopath = path + childpath[len(copyfrompath):]
810 814 copytopath = self.getrelpath(copytopath)
811 815 copies[self.recode(copytopath)] = self.recode(childpath)
812 816
813 817 self.ui.progress(_('scanning paths'), None)
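# removals are also reported in 'changed'; the separate 'removed' set is
# returned so the caller can track deletions (compare getfile() below,
# which returns None for entries in self.removed)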
814 818 changed.update(removed)
815 819 return (list(changed), removed, copies)
816 820
817 821 def _fetch_revisions(self, from_revnum, to_revnum):
818 822 if from_revnum < to_revnum:
819 823 from_revnum, to_revnum = to_revnum, from_revnum
820 824
821 825 self.child_cset = None
822 826
823 827 def parselogentry(orig_paths, revnum, author, date, message):
824 828 """Return the parsed commit object or None, and True if
825 829 the revision is a branch root.
826 830 """
827 831 self.ui.debug("parsing revision %d (%d changes)\n" %
828 832 (revnum, len(orig_paths)))
829 833
830 834 branched = False
831 835 rev = self.revid(revnum)
832 836 # branch log might return entries for a parent we already have
833 837
834 838 if rev in self.commits or revnum < to_revnum:
835 839 return None, branched
836 840
837 841 parents = []
838 842 # check whether this revision is the start of a branch or part
839 843 # of a branch renaming
840 844 orig_paths = sorted(orig_paths.iteritems())
841 845 root_paths = [(p, e) for p, e in orig_paths
842 846 if self.module.startswith(p)]
843 847 if root_paths:
844 848 path, ent = root_paths[-1]
845 849 if ent.copyfrom_path:
846 850 branched = True
847 851 newpath = ent.copyfrom_path + self.module[len(path):]
848 852 # ent.copyfrom_rev may not be the actual last revision
849 853 previd = self.latest(newpath, ent.copyfrom_rev)
850 854 if previd is not None:
851 855 prevmodule, prevnum = revsplit(previd)[1:]
852 856 if prevnum >= self.startrev:
853 857 parents = [previd]
854 858 self.ui.note(
855 859 _('found parent of branch %s at %d: %s\n') %
856 860 (self.module, prevnum, prevmodule))
857 861 else:
858 862 self.ui.debug("no copyfrom path, don't know what to do.\n")
859 863
860 864 paths = []
861 865 # filter out unrelated paths
862 866 for path, ent in orig_paths:
863 867 if self.getrelpath(path) is None:
864 868 continue
865 869 paths.append((path, ent))
866 870
867 871 # Example SVN datetime. Includes microseconds.
868 872 # ISO-8601 conformant
869 873 # '2007-01-04T17:35:00.902377Z'
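# date[:19] keeps 'YYYY-MM-DDTHH:MM:SS' and drops the fractional
# seconds and trailing 'Z' before the value is parsed as UTC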
870 874 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
871 875 if self.ui.configbool('convert', 'localtimezone'):
872 876 date = makedatetimestamp(date[0])
873 877
874 878 if message:
875 879 log = self.recode(message)
876 880 else:
877 881 log = ''
878 882
879 883 if author:
880 884 author = self.recode(author)
881 885 else:
882 886 author = ''
883 887
884 888 try:
885 889 branch = self.module.split("/")[-1]
886 890 if branch == self.trunkname:
887 891 branch = None
888 892 except IndexError:
889 893 branch = None
890 894
891 895 cset = commit(author=author,
892 896 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
893 897 desc=log,
894 898 parents=parents,
895 899 branch=branch,
896 900 rev=rev)
897 901
898 902 self.commits[rev] = cset
899 903 # The parents list is *shared* among self.paths and the
900 904 # commit object. Both will be updated below.
901 905 self.paths[rev] = (paths, cset.parents)
902 906 if self.child_cset and not self.child_cset.parents:
903 907 self.child_cset.parents[:] = [rev]
904 908 self.child_cset = cset
905 909 return cset, branched
906 910
907 911 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
908 912 (self.module, from_revnum, to_revnum))
909 913
910 914 try:
911 915 firstcset = None
912 916 lastonbranch = False
913 917 stream = self._getlog([self.module], from_revnum, to_revnum)
914 918 try:
915 919 for entry in stream:
916 920 paths, revnum, author, date, message = entry
917 921 if revnum < self.startrev:
918 922 lastonbranch = True
919 923 break
920 924 if not paths:
921 925 self.ui.debug('revision %d has no entries\n' % revnum)
922 926 # If we ever leave the loop on an empty
923 927 # revision, do not try to get a parent branch
924 928 lastonbranch = lastonbranch or revnum == 0
925 929 continue
926 930 cset, lastonbranch = parselogentry(paths, revnum, author,
927 931 date, message)
928 932 if cset:
929 933 firstcset = cset
930 934 if lastonbranch:
931 935 break
932 936 finally:
933 937 stream.close()
934 938
935 939 if not lastonbranch and firstcset and not firstcset.parents:
936 940 # The first revision of the sequence (the last fetched one)
937 941 # has invalid parents if not a branch root. Find the parent
938 942 # revision now, if any.
939 943 try:
940 944 firstrevnum = self.revnum(firstcset.rev)
941 945 if firstrevnum > 1:
942 946 latest = self.latest(self.module, firstrevnum - 1)
943 947 if latest:
944 948 firstcset.parents.append(latest)
945 949 except SvnPathNotFound:
946 950 pass
947 951 except SubversionException as xxx_todo_changeme:
948 952 (inst, num) = xxx_todo_changeme.args
949 953 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
950 954 raise util.Abort(_('svn: branch has no revision %s')
951 955 % to_revnum)
952 956 raise
953 957
954 958 def getfile(self, file, rev):
955 959 # TODO: ra.get_file transmits the whole file instead of diffs.
956 960 if file in self.removed:
957 961 return None, None
958 962 mode = ''
959 963 try:
960 964 new_module, revnum = revsplit(rev)[1:]
961 965 if self.module != new_module:
962 966 self.module = new_module
963 967 self.reparent(self.module)
964 968 io = StringIO()
965 969 info = svn.ra.get_file(self.ra, file, revnum, io)
966 970 data = io.getvalue()
967 971 # ra.get_file() seems to keep a reference on the input buffer
968 972 # preventing collection. Release it explicitly.
969 973 io.close()
970 974 if isinstance(info, list):
971 975 info = info[-1]
972 976 mode = ("svn:executable" in info) and 'x' or ''
973 977 mode = ("svn:special" in info) and 'l' or mode
974 978 except SubversionException as e:
975 979 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
976 980 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
977 981 if e.apr_err in notfound: # File not found
978 982 return None, None
979 983 raise
980 984 if mode == 'l':
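# svn:special symlinks store their target as "link <target>"; strip
# the marker so only the target is kept as the file data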
981 985 link_prefix = "link "
982 986 if data.startswith(link_prefix):
983 987 data = data[len(link_prefix):]
984 988 return data, mode
985 989
986 990 def _iterfiles(self, path, revnum):
987 991 """Enumerate all files in path at revnum, recursively."""
988 992 path = path.strip('/')
989 993 pool = Pool()
990 994 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
991 995 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
992 996 if path:
993 997 path += '/'
994 998 return ((path + p) for p, e in entries.iteritems()
995 999 if e.kind == svn.core.svn_node_file)
996 1000
997 1001 def getrelpath(self, path, module=None):
998 1002 if module is None:
999 1003 module = self.module
1000 1004 # Given the repository url of this wc, say
1001 1005 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
1002 1006 # extract the "entry" portion (a relative path) from what
1003 1007 # svn log --xml says, i.e.
1004 1008 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
1005 1009 # that is to say "tests/PloneTestCase.py"
1006 1010 if path.startswith(module):
1007 1011 relative = path.rstrip('/')[len(module):]
1008 1012 if relative.startswith('/'):
1009 1013 return relative[1:]
1010 1014 elif relative == '':
1011 1015 return relative
1012 1016
1013 1017 # The path is outside our tracked tree...
1014 1018 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
1015 1019 return None
1016 1020
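The comment above already walks through a concrete case; here is a standalone, runnable sketch of the same prefix logic with the ui/debug plumbing dropped (values taken from that example):

  def getrelpath(path, module):
      # same logic as the method above, minus the debug message
      if path.startswith(module):
          relative = path.rstrip('/')[len(module):]
          if relative.startswith('/'):
              return relative[1:]
          elif relative == '':
              return relative
      return None

  module = '/CMFPlone/branches/Plone-2_0-branch'
  assert getrelpath(module + '/tests/PloneTestCase.py',
                    module) == 'tests/PloneTestCase.py'
  assert getrelpath(module, module) == ''
  assert getrelpath('/CMFPlone/trunk/README.txt', module) is None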
1017 1021 def _checkpath(self, path, revnum, module=None):
1018 1022 if module is not None:
1019 1023 prevmodule = self.reparent('')
1020 1024 path = module + '/' + path
1021 1025 try:
1022 1026 # ra.check_path does not like leading slashes very much, it leads
1023 1027 # to PROPFIND subversion errors
1024 1028 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1025 1029 finally:
1026 1030 if module is not None:
1027 1031 self.reparent(prevmodule)
1028 1032
1029 1033 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1030 1034 strict_node_history=False):
1031 1035 # Normalize path names, svn >= 1.5 only wants paths relative to
1032 1036 # supplied URL
1033 1037 relpaths = []
1034 1038 for p in paths:
1035 1039 if not p.startswith('/'):
1036 1040 p = self.module + '/' + p
1037 1041 relpaths.append(p.strip('/'))
1038 1042 args = [self.baseurl, relpaths, start, end, limit,
1039 1043 discover_changed_paths, strict_node_history]
1040 1044 # undocumented feature: debugsvnlog can be disabled
1041 1045 if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
1042 1046 return directlogstream(*args)
1043 1047 arg = encodeargs(args)
1044 1048 hgexe = util.hgexecutable()
1045 1049 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1046 1050 stdin, stdout = util.popen2(util.quotecommand(cmd))
1047 1051 stdin.write(arg)
1048 1052 try:
1049 1053 stdin.close()
1050 1054 except IOError:
1051 1055 raise util.Abort(_('Mercurial failed to run itself, check'
1052 1056 ' hg executable is in PATH'))
1053 1057 return logstream(stdout)
1054 1058
1055 1059 pre_revprop_change = '''#!/bin/sh
1056 1060
1057 1061 REPOS="$1"
1058 1062 REV="$2"
1059 1063 USER="$3"
1060 1064 PROPNAME="$4"
1061 1065 ACTION="$5"
1062 1066
1063 1067 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1064 1068 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1065 1069 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1066 1070
1067 1071 echo "Changing prohibited revision property" >&2
1068 1072 exit 1
1069 1073 '''
1070 1074
1071 1075 class svn_sink(converter_sink, commandline):
1072 1076 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1073 1077 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1074 1078
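As a quick illustration of how the two class-level patterns above are used against raw svn client output (the sample strings below are invented, but follow the usual 'svn commit' and 'svn info' output format):

  import re

  commit_re = re.compile(r'Committed revision (\d+).', re.M)
  uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)

  # hypothetical client output, not recorded from a real run
  commit_output = ('Sending        a\n'
                   'Transmitting file data .\n'
                   'Committed revision 7.\n')
  info_output = ('URL: file:///tmp/repo\n'
                 'Repository UUID: 5e8f0b3c-0000-0000-0000-000000000000\n')

  assert commit_re.search(commit_output).group(1) == '7'
  assert (uuid_re.search(info_output).group(1)
          == '5e8f0b3c-0000-0000-0000-000000000000')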
1075 1079 def prerun(self):
1076 1080 if self.wc:
1077 1081 os.chdir(self.wc)
1078 1082
1079 1083 def postrun(self):
1080 1084 if self.wc:
1081 1085 os.chdir(self.cwd)
1082 1086
1083 1087 def join(self, name):
1084 1088 return os.path.join(self.wc, '.svn', name)
1085 1089
1086 1090 def revmapfile(self):
1087 1091 return self.join('hg-shamap')
1088 1092
1089 1093 def authorfile(self):
1090 1094 return self.join('hg-authormap')
1091 1095
1092 1096 def __init__(self, ui, path):
1093 1097
1094 1098 converter_sink.__init__(self, ui, path)
1095 1099 commandline.__init__(self, ui, 'svn')
1096 1100 self.delete = []
1097 1101 self.setexec = []
1098 1102 self.delexec = []
1099 1103 self.copies = []
1100 1104 self.wc = None
1101 1105 self.cwd = os.getcwd()
1102 1106
1103 1107 created = False
1104 1108 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1105 1109 self.wc = os.path.realpath(path)
1106 1110 self.run0('update')
1107 1111 else:
1108 1112 if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
1109 1113 path = os.path.realpath(path)
1110 1114 if os.path.isdir(os.path.dirname(path)):
1111 1115 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1112 1116 ui.status(_('initializing svn repository %r\n') %
1113 1117 os.path.basename(path))
1114 1118 commandline(ui, 'svnadmin').run0('create', path)
1115 1119 created = path
1116 1120 path = util.normpath(path)
1117 1121 if not path.startswith('/'):
1118 1122 path = '/' + path
1119 1123 path = 'file://' + path
1120 1124
1121 1125 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1122 1126 ui.status(_('initializing svn working copy %r\n')
1123 1127 % os.path.basename(wcpath))
1124 1128 self.run0('checkout', path, wcpath)
1125 1129
1126 1130 self.wc = wcpath
1127 1131 self.opener = scmutil.opener(self.wc)
1128 1132 self.wopener = scmutil.opener(self.wc)
1129 1133 self.childmap = mapfile(ui, self.join('hg-childmap'))
1130 1134 if util.checkexec(self.wc):
1131 1135 self.is_exec = util.isexec
1132 1136 else:
1133 1137 self.is_exec = None
1134 1138
1135 1139 if created:
1136 1140 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
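# install the shell hook defined above; without it svn would refuse the
# 'propset --revprop' calls made after each commit (hg:convert-rev etc.)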
1137 1141 fp = open(hook, 'w')
1138 1142 fp.write(pre_revprop_change)
1139 1143 fp.close()
1140 1144 util.setflags(hook, False, True)
1141 1145
1142 1146 output = self.run0('info')
1143 1147 self.uuid = self.uuid_re.search(output).group(1).strip()
1144 1148
1145 1149 def wjoin(self, *names):
1146 1150 return os.path.join(self.wc, *names)
1147 1151
1148 1152 @propertycache
1149 1153 def manifest(self):
1150 1154 # As of svn 1.7, the "add" command fails when receiving
1151 1155 # already tracked entries, so we have to track and filter them
1152 1156 # ourselves.
1153 1157 m = set()
1154 1158 output = self.run0('ls', recursive=True, xml=True)
1155 1159 doc = xml.dom.minidom.parseString(output)
1156 1160 for e in doc.getElementsByTagName('entry'):
1157 1161 for n in e.childNodes:
1158 1162 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1159 1163 continue
1160 1164 name = ''.join(c.data for c in n.childNodes
1161 1165 if c.nodeType == c.TEXT_NODE)
1162 1166 # Entries are compared with names coming from
1163 1167 # mercurial, so bytes with undefined encoding. Our
1164 1168 # best bet is to assume they are in local
1165 1169 # encoding. They will be passed to command line calls
1166 1170 # later anyway, so they better be.
1167 1171 m.add(encoding.tolocal(name.encode('utf-8')))
1168 1172 break
1169 1173 return m
1170 1174
1171 1175 def putfile(self, filename, flags, data):
1172 1176 if 'l' in flags:
1173 1177 self.wopener.symlink(data, filename)
1174 1178 else:
1175 1179 try:
1176 1180 if os.path.islink(self.wjoin(filename)):
1177 1181 os.unlink(filename)
1178 1182 except OSError:
1179 1183 pass
1180 1184 self.wopener.write(filename, data)
1181 1185
1182 1186 if self.is_exec:
1183 1187 if self.is_exec(self.wjoin(filename)):
1184 1188 if 'x' not in flags:
1185 1189 self.delexec.append(filename)
1186 1190 else:
1187 1191 if 'x' in flags:
1188 1192 self.setexec.append(filename)
1189 1193 util.setflags(self.wjoin(filename), False, 'x' in flags)
1190 1194
1191 1195 def _copyfile(self, source, dest):
1192 1196 # SVN's copy command pukes if the destination file exists, but
1193 1197 # our copyfile method expects to record a copy that has
1194 1198 # already occurred. Cross the semantic gap.
1195 1199 wdest = self.wjoin(dest)
1196 1200 exists = os.path.lexists(wdest)
1197 1201 if exists:
1198 1202 fd, tempname = tempfile.mkstemp(
1199 1203 prefix='hg-copy-', dir=os.path.dirname(wdest))
1200 1204 os.close(fd)
1201 1205 os.unlink(tempname)
1202 1206 os.rename(wdest, tempname)
1203 1207 try:
1204 1208 self.run0('copy', source, dest)
1205 1209 finally:
1206 1210 self.manifest.add(dest)
1207 1211 if exists:
1208 1212 try:
1209 1213 os.unlink(wdest)
1210 1214 except OSError:
1211 1215 pass
1212 1216 os.rename(tempname, wdest)
1213 1217
1214 1218 def dirs_of(self, files):
1215 1219 dirs = set()
1216 1220 for f in files:
1217 1221 if os.path.isdir(self.wjoin(f)):
1218 1222 dirs.add(f)
1219 1223 for i in strutil.rfindall(f, '/'):
1220 1224 dirs.add(f[:i])
1221 1225 return dirs
1222 1226
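dirs_of() collects every ancestor directory of the given files so that add_dirs() below can svn-add the directories before the files themselves. A standalone sketch of that computation for a single plain file path (ignoring the isdir() check and the strutil helper):

  def parent_dirs(path):
      # 'a/b/c.txt' -> set(['a', 'a/b']); illustration only
      dirs = set()
      i = path.rfind('/')
      while i != -1:
          dirs.add(path[:i])
          i = path.rfind('/', 0, i)
      return dirs

  assert parent_dirs('a/b/c.txt') == set(['a', 'a/b'])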
1223 1227 def add_dirs(self, files):
1224 1228 add_dirs = [d for d in sorted(self.dirs_of(files))
1225 1229 if d not in self.manifest]
1226 1230 if add_dirs:
1227 1231 self.manifest.update(add_dirs)
1228 1232 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1229 1233 return add_dirs
1230 1234
1231 1235 def add_files(self, files):
1232 1236 files = [f for f in files if f not in self.manifest]
1233 1237 if files:
1234 1238 self.manifest.update(files)
1235 1239 self.xargs(files, 'add', quiet=True)
1236 1240 return files
1237 1241
1238 1242 def addchild(self, parent, child):
1239 1243 self.childmap[parent] = child
1240 1244
1241 1245 def revid(self, rev):
1242 1246 return u"svn:%s@%s" % (self.uuid, rev)
1243 1247
1244 1248 def putcommit(self, files, copies, parents, commit, source, revmap, full,
1245 1249 cleanp2):
1246 1250 for parent in parents:
1247 1251 try:
1248 1252 return self.revid(self.childmap[parent])
1249 1253 except KeyError:
1250 1254 pass
1251 1255
1252 1256 # Apply changes to working copy
1253 1257 for f, v in files:
1254 1258 data, mode = source.getfile(f, v)
1255 1259 if data is None:
1256 1260 self.delete.append(f)
1257 1261 else:
1258 1262 self.putfile(f, mode, data)
1259 1263 if f in copies:
1260 1264 self.copies.append([copies[f], f])
1261 1265 if full:
1262 1266 self.delete.extend(sorted(self.manifest.difference(files)))
1263 1267 files = [f[0] for f in files]
1264 1268
1265 1269 entries = set(self.delete)
1266 1270 files = frozenset(files)
1267 1271 entries.update(self.add_dirs(files.difference(entries)))
1268 1272 if self.copies:
1269 1273 for s, d in self.copies:
1270 1274 self._copyfile(s, d)
1271 1275 self.copies = []
1272 1276 if self.delete:
1273 1277 self.xargs(self.delete, 'delete')
1274 1278 for f in self.delete:
1275 1279 self.manifest.remove(f)
1276 1280 self.delete = []
1277 1281 entries.update(self.add_files(files.difference(entries)))
1278 1282 if self.delexec:
1279 1283 self.xargs(self.delexec, 'propdel', 'svn:executable')
1280 1284 self.delexec = []
1281 1285 if self.setexec:
1282 1286 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1283 1287 self.setexec = []
1284 1288
1285 1289 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1286 1290 fp = os.fdopen(fd, 'w')
1287 1291 fp.write(commit.desc)
1288 1292 fp.close()
1289 1293 try:
1290 1294 output = self.run0('commit',
1291 1295 username=util.shortuser(commit.author),
1292 1296 file=messagefile,
1293 1297 encoding='utf-8')
1294 1298 try:
1295 1299 rev = self.commit_re.search(output).group(1)
1296 1300 except AttributeError:
1297 1301 if parents and not files:
1298 1302 return parents[0]
1299 1303 self.ui.warn(_('unexpected svn output:\n'))
1300 1304 self.ui.warn(output)
1301 1305 raise util.Abort(_('unable to cope with svn output'))
1302 1306 if commit.rev:
1303 1307 self.run('propset', 'hg:convert-rev', commit.rev,
1304 1308 revprop=True, revision=rev)
1305 1309 if commit.branch and commit.branch != 'default':
1306 1310 self.run('propset', 'hg:convert-branch', commit.branch,
1307 1311 revprop=True, revision=rev)
1308 1312 for parent in parents:
1309 1313 self.addchild(parent, rev)
1310 1314 return self.revid(rev)
1311 1315 finally:
1312 1316 os.unlink(messagefile)
1313 1317
1314 1318 def puttags(self, tags):
1315 1319 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1316 1320 return None, None
1317 1321
1318 1322 def hascommitfrommap(self, rev):
1319 1323 # We trust that revisions referenced in a map are still present
1320 1324 # TODO: implement something better if necessary and feasible
1321 1325 return True
1322 1326
1323 1327 def hascommitforsplicemap(self, rev):
1324 1328 # This is not correct as one can convert to an existing subversion
1325 1329 # repository and childmap would not list all revisions. Too bad.
1326 1330 if rev in self.childmap:
1327 1331 return True
1328 1332 raise util.Abort(_('splice map revision %s not found in subversion '
1329 1333 'child map (revision lookups are not implemented)')
1330 1334 % rev)
@@ -1,505 +1,505 b''
1 1 $ cat >> $HGRCPATH <<EOF
2 2 > [extensions]
3 3 > convert=
4 4 > [convert]
5 5 > hg.saverev=False
6 6 > EOF
7 7 $ hg help convert
8 8 hg convert [OPTION]... SOURCE [DEST [REVMAP]]
9 9
10 10 convert a foreign SCM repository to a Mercurial one.
11 11
12 12 Accepted source formats [identifiers]:
13 13
14 14 - Mercurial [hg]
15 15 - CVS [cvs]
16 16 - Darcs [darcs]
17 17 - git [git]
18 18 - Subversion [svn]
19 19 - Monotone [mtn]
20 20 - GNU Arch [gnuarch]
21 21 - Bazaar [bzr]
22 22 - Perforce [p4]
23 23
24 24 Accepted destination formats [identifiers]:
25 25
26 26 - Mercurial [hg]
27 27 - Subversion [svn] (history on branches is not preserved)
28 28
29 29 If no revision is given, all revisions will be converted. Otherwise,
30 30 convert will only import up to the named revision (given in a format
31 31 understood by the source).
32 32
33 33 If no destination directory name is specified, it defaults to the basename
34 34 of the source with "-hg" appended. If the destination repository doesn't
35 35 exist, it will be created.
36 36
37 37 By default, all sources except Mercurial will use --branchsort. Mercurial
38 38 uses --sourcesort to preserve original revision numbers order. Sort modes
39 39 have the following effects:
40 40
41 41 --branchsort convert from parent to child revision when possible, which
42 42 means branches are usually converted one after the other.
43 43 It generates more compact repositories.
44 44 --datesort sort revisions by date. Converted repositories have good-
45 45 looking changelogs but are often an order of magnitude
46 46 larger than the same ones generated by --branchsort.
47 47 --sourcesort try to preserve source revisions order, only supported by
48 48 Mercurial sources.
49 49 --closesort try to move closed revisions as close as possible to parent
50 50 branches, only supported by Mercurial sources.
51 51
52 52 If "REVMAP" isn't given, it will be put in a default location
53 53 ("<dest>/.hg/shamap" by default). The "REVMAP" is a simple text file that
54 54 maps each source commit ID to the destination ID for that revision, like
55 55 so:
56 56
57 57 <source ID> <destination ID>
58 58
59 59 If the file doesn't exist, it's automatically created. It's updated on
60 60 each commit copied, so "hg convert" can be interrupted and can be run
61 61 repeatedly to copy new commits.
62 62
63 63 The authormap is a simple text file that maps each source commit author to
64 64 a destination commit author. It is handy for source SCMs that use unix
65 65 logins to identify authors (e.g.: CVS). One line per author mapping and
66 66 the line format is:
67 67
68 68 source author = destination author
69 69
70 70 Empty lines and lines starting with a "#" are ignored.
71 71
72 72 The filemap is a file that allows filtering and remapping of files and
73 73 directories. Each line can contain one of the following directives:
74 74
75 75 include path/to/file-or-dir
76 76
77 77 exclude path/to/file-or-dir
78 78
79 79 rename path/to/source path/to/destination
80 80
81 81 Comment lines start with "#". A specified path matches if it equals the
82 82 full relative name of a file or one of its parent directories. The
83 83 "include" or "exclude" directive with the longest matching path applies,
84 84 so line order does not matter.
85 85
86 86 The "include" directive causes a file, or all files under a directory, to
87 87 be included in the destination repository. The default if there are no
88 88 "include" statements is to include everything. If there are any "include"
89 89 statements, nothing else is included. The "exclude" directive causes files
90 90 or directories to be omitted. The "rename" directive renames a file or
91 91 directory if it is converted. To rename from a subdirectory into the root
92 92 of the repository, use "." as the path to rename to.
93 93
94 94 "--full" will make sure the converted changesets contain exactly the right
95 95 files with the right content. It will make a full conversion of all files,
96 96 not just the ones that have changed. Files that already are correct will
97 97 not be changed. This can be used to apply filemap changes when converting
98 98 incrementally. This is currently only supported for Mercurial and
99 99 Subversion.
100 100
101 101 The splicemap is a file that allows insertion of synthetic history,
102 102 letting you specify the parents of a revision. This is useful if you want
103 103 to e.g. give a Subversion merge two parents, or graft two disconnected
104 104 series of history together. Each entry contains a key, followed by a
105 105 space, followed by one or two comma-separated values:
106 106
107 107 key parent1, parent2
108 108
109 109 The key is the revision ID in the source revision control system whose
110 110 parents should be modified (same format as a key in .hg/shamap). The
111 111 values are the revision IDs (in either the source or destination revision
112 112 control system) that should be used as the new parents for that node. For
113 113 example, if you have merged "release-1.0" into "trunk", then you should
114 114 specify the revision on "trunk" as the first parent and the one on the
115 115 "release-1.0" branch as the second.
116 116
117 117 The branchmap is a file that allows you to rename a branch when it is
118 118 being brought in from whatever external repository. When used in
119 119 conjunction with a splicemap, it allows for a powerful combination to help
120 120 fix even the most badly mismanaged repositories and turn them into nicely
121 121 structured Mercurial repositories. The branchmap contains lines of the
122 122 form:
123 123
124 124 original_branch_name new_branch_name
125 125
126 126 where "original_branch_name" is the name of the branch in the source
127 127 repository, and "new_branch_name" is the name of the branch in the
128 128 destination repository. No whitespace is allowed in the branch names. This
129 129 can be used to (for instance) move code in one repository from "default"
130 130 to a named branch.
131 131
132 132 Mercurial Source
133 133 ################
134 134
135 135 The Mercurial source recognizes the following configuration options, which
136 136 you can set on the command line with "--config":
137 137
138 138 convert.hg.ignoreerrors
139 139 ignore integrity errors when reading. Use it to fix
140 140 Mercurial repositories with missing revlogs, by converting
141 141 from and to Mercurial. Default is False.
142 142 convert.hg.saverev
143 143 store original revision ID in changeset (forces target IDs
144 144 to change). It takes a boolean argument and defaults to
145 145 False.
146 146 convert.hg.revs
147 147 revset specifying the source revisions to convert.
148 148
149 149 CVS Source
150 150 ##########
151 151
152 152 CVS source will use a sandbox (i.e. a checked-out copy) from CVS to
153 153 indicate the starting point of what will be converted. Direct access to
154 154 the repository files is not needed, unless of course the repository is
155 155 ":local:". The conversion uses the top level directory in the sandbox to
156 156 find the CVS repository, and then uses CVS rlog commands to find files to
157 157 convert. This means that unless a filemap is given, all files under the
158 158 starting directory will be converted, and that any directory
159 159 reorganization in the CVS sandbox is ignored.
160 160
161 161 The following options can be used with "--config":
162 162
163 163 convert.cvsps.cache
164 164 Set to False to disable remote log caching, for testing and
165 165 debugging purposes. Default is True.
166 166 convert.cvsps.fuzz
167 167 Specify the maximum time (in seconds) that is allowed
168 168 between commits with identical user and log message in a
169 169 single changeset. When very large files were checked in as
170 170 part of a changeset then the default may not be long enough.
171 171 The default is 60.
172 172 convert.cvsps.mergeto
173 173 Specify a regular expression to which commit log messages
174 174 are matched. If a match occurs, then the conversion process
175 175 will insert a dummy revision merging the branch on which
176 176 this log message occurs to the branch indicated in the
177 177 regex. Default is "{{mergetobranch ([-\w]+)}}"
178 178 convert.cvsps.mergefrom
179 179 Specify a regular expression to which commit log messages
180 180 are matched. If a match occurs, then the conversion process
181 181 will add the most recent revision on the branch indicated in
182 182 the regex as the second parent of the changeset. Default is
183 183 "{{mergefrombranch ([-\w]+)}}"
184 184 convert.localtimezone
185 185 use local time (as determined by the TZ environment
186 186 variable) for changeset date/times. The default is False
187 187 (use UTC).
188 188 hooks.cvslog Specify a Python function to be called at the end of
189 189 gathering the CVS log. The function is passed a list with
190 190 the log entries, and can modify the entries in-place, or add
191 191 or delete them.
192 192 hooks.cvschangesets
193 193 Specify a Python function to be called after the changesets
194 194 are calculated from the CVS log. The function is passed a
195 195 list with the changeset entries, and can modify the
196 196 changesets in-place, or add or delete them.
197 197
198 198 An additional "debugcvsps" Mercurial command allows the builtin changeset
199 199 merging code to be run without doing a conversion. Its parameters and
200 200 output are similar to that of cvsps 2.1. Please see the command help for
201 201 more details.
202 202
203 203 Subversion Source
204 204 #################
205 205
206 206 Subversion source detects classical trunk/branches/tags layouts. By
207 207 default, the supplied "svn://repo/path/" source URL is converted as a
208 208 single branch. If "svn://repo/path/trunk" exists it replaces the default
209 209 branch. If "svn://repo/path/branches" exists, its subdirectories are
210 210 listed as possible branches. If "svn://repo/path/tags" exists, it is
211 211 looked for tags referencing converted branches. Default "trunk",
212 212 "branches" and "tags" values can be overridden with following options. Set
213 213 them to paths relative to the source URL, or leave them blank to disable
214 214 auto detection.
215 215
216 216 The following options can be set with "--config":
217 217
218 218 convert.svn.branches
219 219 specify the directory containing branches. The default is
220 220 "branches".
221 221 convert.svn.tags
222 222 specify the directory containing tags. The default is
223 223 "tags".
224 224 convert.svn.trunk
225 225 specify the name of the trunk branch. The default is
226 226 "trunk".
227 227 convert.localtimezone
228 228 use local time (as determined by the TZ environment
229 229 variable) for changeset date/times. The default is False
230 230 (use UTC).
231 231
232 232 Source history can be retrieved starting at a specific revision, instead
233 233 of being integrally converted. Only single branch conversions are
234 234 supported.
235 235
236 236 convert.svn.startrev
237 237 specify start Subversion revision number. The default is 0.
238 238
239 239 Git Source
240 240 ##########
241 241
242 242 The Git importer converts commits from all reachable branches (refs in
243 243 refs/heads) and remotes (refs in refs/remotes) to Mercurial. Branches are
244 244 converted to bookmarks with the same name, with the leading 'refs/heads'
245 245 stripped. Git submodules are converted to Git subrepos in Mercurial.
246 246
247 247 The following options can be set with "--config":
248 248
249 249 convert.git.similarity
250 250 specify how similar files modified in a commit must be to be
251 251 imported as renames or copies, as a percentage between "0"
252 252 (disabled) and "100" (files must be identical). For example,
253 253 "90" means that a delete/add pair will be imported as a
254 254 rename if more than 90% of the file hasn't changed. The
255 255 default is "50".
256 256 convert.git.findcopiesharder
257 257 while detecting copies, look at all files in the working
258 258 copy instead of just changed ones. This is very expensive
259 259 for large projects, and is only effective when
260 260 "convert.git.similarity" is greater than 0. The default is
261 261 False.
262 262
263 263 Perforce Source
264 264 ###############
265 265
266 266 The Perforce (P4) importer can be given a p4 depot path or a client
267 267 specification as source. It will convert all files in the source to a flat
268 268 Mercurial repository, ignoring labels, branches and integrations. Note
269 269 that when a depot path is given you then usually should specify a target
270 270 directory, because otherwise the target may be named "...-hg".
271 271
272 272 It is possible to limit the amount of source history to be converted by
273 273 specifying an initial Perforce revision:
274 274
275 275 convert.p4.startrev
276 276 specify initial Perforce revision (a Perforce changelist
277 277 number).
278 278
279 279 Mercurial Destination
280 280 #####################
281 281
282 282 The Mercurial destination will recognize Mercurial subrepositories in the
283 283 destination directory, and update the .hgsubstate file automatically if
284 284 the destination subrepositories contain the <dest>/<sub>/.hg/shamap file.
285 285 Converting a repository with subrepositories requires converting a single
286 286 repository at a time, from the bottom up.
287 287
288 288 The following options are supported:
289 289
290 290 convert.hg.clonebranches
291 291 dispatch source branches in separate clones. The default is
292 292 False.
293 293 convert.hg.tagsbranch
294 294 branch name for tag revisions, defaults to "default".
295 295 convert.hg.usebranchnames
296 296 preserve branch names. The default is True.
297 297
298 298 All Destinations
299 299 ################
300 300
301 301 All destination types accept the following options:
302 302
303 303 convert.skiptags
304 304 does not convert tags from the source repo to the target
305 305 repo. The default is False.
306 306
307 options:
307 options ([+] can be repeated):
308 308
309 309 -s --source-type TYPE source repository type
310 310 -d --dest-type TYPE destination repository type
311 -r --rev REV import up to source revision REV
311 -r --rev REV [+] import up to source revision REV
312 312 -A --authormap FILE remap usernames using this file
313 313 --filemap FILE remap file names using contents of file
314 314 --full apply filemap changes by converting all files again
315 315 --splicemap FILE splice synthesized history into place
316 316 --branchmap FILE change branch names while converting
317 317 --branchsort try to sort changesets by branches
318 318 --datesort try to sort changesets by date
319 319 --sourcesort preserve source changesets order
320 320 --closesort try to reorder closed revisions
321 321
322 322 (some details hidden, use --verbose to show complete help)
323 323 $ hg init a
324 324 $ cd a
325 325 $ echo a > a
326 326 $ hg ci -d'0 0' -Ama
327 327 adding a
328 328 $ hg cp a b
329 329 $ hg ci -d'1 0' -mb
330 330 $ hg rm a
331 331 $ hg ci -d'2 0' -mc
332 332 $ hg mv b a
333 333 $ hg ci -d'3 0' -md
334 334 $ echo a >> a
335 335 $ hg ci -d'4 0' -me
336 336 $ cd ..
337 337 $ hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded'
338 338 assuming destination a-hg
339 339 initializing destination a-hg repository
340 340 scanning source...
341 341 sorting...
342 342 converting...
343 343 4 a
344 344 3 b
345 345 2 c
346 346 1 d
347 347 0 e
348 348 $ hg --cwd a-hg pull ../a
349 349 pulling from ../a
350 350 searching for changes
351 351 no changes found
352 352
353 353 conversion to existing file should fail
354 354
355 355 $ touch bogusfile
356 356 $ hg convert a bogusfile
357 357 initializing destination bogusfile repository
358 358 abort: cannot create new bundle repository
359 359 [255]
360 360
361 361 #if unix-permissions no-root
362 362
363 363 conversion to dir without permissions should fail
364 364
365 365 $ mkdir bogusdir
366 366 $ chmod 000 bogusdir
367 367
368 368 $ hg convert a bogusdir
369 369 abort: Permission denied: 'bogusdir'
370 370 [255]
371 371
372 372 user permissions should succeed
373 373
374 374 $ chmod 700 bogusdir
375 375 $ hg convert a bogusdir
376 376 initializing destination bogusdir repository
377 377 scanning source...
378 378 sorting...
379 379 converting...
380 380 4 a
381 381 3 b
382 382 2 c
383 383 1 d
384 384 0 e
385 385
386 386 #endif
387 387
388 388 test pre and post conversion actions
389 389
390 390 $ echo 'include b' > filemap
391 391 $ hg convert --debug --filemap filemap a partialb | \
392 392 > grep 'run hg'
393 393 run hg source pre-conversion action
394 394 run hg sink pre-conversion action
395 395 run hg sink post-conversion action
396 396 run hg source post-conversion action
397 397
398 398 converting empty dir should fail nicely
399 399
400 400 $ mkdir emptydir
401 401
402 402 override $PATH to ensure p4 not visible; use $PYTHON in case we're
403 403 running from a devel copy, not a temp installation
404 404
405 405 $ PATH="$BINDIR" $PYTHON "$BINDIR"/hg convert emptydir
406 406 assuming destination emptydir-hg
407 407 initializing destination emptydir-hg repository
408 408 emptydir does not look like a CVS checkout
409 409 emptydir does not look like a Git repository
410 410 emptydir does not look like a Subversion repository
411 411 emptydir is not a local Mercurial repository
412 412 emptydir does not look like a darcs repository
413 413 emptydir does not look like a monotone repository
414 414 emptydir does not look like a GNU Arch repository
415 415 emptydir does not look like a Bazaar repository
416 416 cannot find required "p4" tool
417 417 abort: emptydir: missing or unsupported repository
418 418 [255]
419 419
420 420 convert with imaginary source type
421 421
422 422 $ hg convert --source-type foo a a-foo
423 423 initializing destination a-foo repository
424 424 abort: foo: invalid source repository type
425 425 [255]
426 426
427 427 convert with imaginary sink type
428 428
429 429 $ hg convert --dest-type foo a a-foo
430 430 abort: foo: invalid destination repository type
431 431 [255]
432 432
433 433 testing: convert must not produce duplicate entries in fncache
434 434
435 435 $ hg convert a b
436 436 initializing destination b repository
437 437 scanning source...
438 438 sorting...
439 439 converting...
440 440 4 a
441 441 3 b
442 442 2 c
443 443 1 d
444 444 0 e
445 445
446 446 contents of fncache file:
447 447
448 448 $ cat b/.hg/store/fncache | sort
449 449 data/a.i
450 450 data/b.i
451 451
452 452 test bogus URL
453 453
454 454 $ hg convert -q bzr+ssh://foobar@selenic.com/baz baz
455 455 abort: bzr+ssh://foobar@selenic.com/baz: missing or unsupported repository
456 456 [255]
457 457
458 458 test revset converted() lookup
459 459
460 460 $ hg --config convert.hg.saverev=True convert a c
461 461 initializing destination c repository
462 462 scanning source...
463 463 sorting...
464 464 converting...
465 465 4 a
466 466 3 b
467 467 2 c
468 468 1 d
469 469 0 e
470 470 $ echo f > c/f
471 471 $ hg -R c ci -d'0 0' -Amf
472 472 adding f
473 473 created new head
474 474 $ hg -R c log -r "converted(09d945a62ce6)"
475 475 changeset: 1:98c3dd46a874
476 476 user: test
477 477 date: Thu Jan 01 00:00:01 1970 +0000
478 478 summary: b
479 479
480 480 $ hg -R c log -r "converted()"
481 481 changeset: 0:31ed57b2037c
482 482 user: test
483 483 date: Thu Jan 01 00:00:00 1970 +0000
484 484 summary: a
485 485
486 486 changeset: 1:98c3dd46a874
487 487 user: test
488 488 date: Thu Jan 01 00:00:01 1970 +0000
489 489 summary: b
490 490
491 491 changeset: 2:3b9ca06ef716
492 492 user: test
493 493 date: Thu Jan 01 00:00:02 1970 +0000
494 494 summary: c
495 495
496 496 changeset: 3:4e0debd37cf2
497 497 user: test
498 498 date: Thu Jan 01 00:00:03 1970 +0000
499 499 summary: d
500 500
501 501 changeset: 4:9de3bc9349c5
502 502 user: test
503 503 date: Thu Jan 01 00:00:04 1970 +0000
504 504 summary: e
505 505