convert: introduce --full for converting all files...
Mads Kiilerich
r22300:35ab037d default
@@ -1,372 +1,382 b''
1 # convert.py Foreign SCM converter
1 # convert.py Foreign SCM converter
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 '''import revisions from foreign VCS repositories into Mercurial'''
8 '''import revisions from foreign VCS repositories into Mercurial'''
9
9
10 import convcmd
10 import convcmd
11 import cvsps
11 import cvsps
12 import subversion
12 import subversion
13 from mercurial import cmdutil, templatekw
13 from mercurial import cmdutil, templatekw
14 from mercurial.i18n import _
14 from mercurial.i18n import _
15
15
16 cmdtable = {}
16 cmdtable = {}
17 command = cmdutil.command(cmdtable)
17 command = cmdutil.command(cmdtable)
18 testedwith = 'internal'
18 testedwith = 'internal'
19
19
20 # Commands definition was moved elsewhere to ease demandload job.
20 # Commands definition was moved elsewhere to ease demandload job.
21
21
22 @command('convert',
22 @command('convert',
23 [('', 'authors', '',
23 [('', 'authors', '',
24 _('username mapping filename (DEPRECATED, use --authormap instead)'),
24 _('username mapping filename (DEPRECATED, use --authormap instead)'),
25 _('FILE')),
25 _('FILE')),
26 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
26 ('s', 'source-type', '', _('source repository type'), _('TYPE')),
27 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
27 ('d', 'dest-type', '', _('destination repository type'), _('TYPE')),
28 ('r', 'rev', '', _('import up to source revision REV'), _('REV')),
28 ('r', 'rev', '', _('import up to source revision REV'), _('REV')),
29 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
29 ('A', 'authormap', '', _('remap usernames using this file'), _('FILE')),
30 ('', 'filemap', '', _('remap file names using contents of file'),
30 ('', 'filemap', '', _('remap file names using contents of file'),
31 _('FILE')),
31 _('FILE')),
32 ('', 'full', None,
33 _('apply filemap changes by converting all files again')),
32 ('', 'splicemap', '', _('splice synthesized history into place'),
34 ('', 'splicemap', '', _('splice synthesized history into place'),
33 _('FILE')),
35 _('FILE')),
34 ('', 'branchmap', '', _('change branch names while converting'),
36 ('', 'branchmap', '', _('change branch names while converting'),
35 _('FILE')),
37 _('FILE')),
36 ('', 'branchsort', None, _('try to sort changesets by branches')),
38 ('', 'branchsort', None, _('try to sort changesets by branches')),
37 ('', 'datesort', None, _('try to sort changesets by date')),
39 ('', 'datesort', None, _('try to sort changesets by date')),
38 ('', 'sourcesort', None, _('preserve source changesets order')),
40 ('', 'sourcesort', None, _('preserve source changesets order')),
39 ('', 'closesort', None, _('try to reorder closed revisions'))],
41 ('', 'closesort', None, _('try to reorder closed revisions'))],
40 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
42 _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]'),
41 norepo=True)
43 norepo=True)
42 def convert(ui, src, dest=None, revmapfile=None, **opts):
44 def convert(ui, src, dest=None, revmapfile=None, **opts):
43 """convert a foreign SCM repository to a Mercurial one.
45 """convert a foreign SCM repository to a Mercurial one.
44
46
45 Accepted source formats [identifiers]:
47 Accepted source formats [identifiers]:
46
48
47 - Mercurial [hg]
49 - Mercurial [hg]
48 - CVS [cvs]
50 - CVS [cvs]
49 - Darcs [darcs]
51 - Darcs [darcs]
50 - git [git]
52 - git [git]
51 - Subversion [svn]
53 - Subversion [svn]
52 - Monotone [mtn]
54 - Monotone [mtn]
53 - GNU Arch [gnuarch]
55 - GNU Arch [gnuarch]
54 - Bazaar [bzr]
56 - Bazaar [bzr]
55 - Perforce [p4]
57 - Perforce [p4]
56
58
57 Accepted destination formats [identifiers]:
59 Accepted destination formats [identifiers]:
58
60
59 - Mercurial [hg]
61 - Mercurial [hg]
60 - Subversion [svn] (history on branches is not preserved)
62 - Subversion [svn] (history on branches is not preserved)
61
63
62 If no revision is given, all revisions will be converted.
64 If no revision is given, all revisions will be converted.
63 Otherwise, convert will only import up to the named revision
65 Otherwise, convert will only import up to the named revision
64 (given in a format understood by the source).
66 (given in a format understood by the source).
65
67
66 If no destination directory name is specified, it defaults to the
68 If no destination directory name is specified, it defaults to the
67 basename of the source with ``-hg`` appended. If the destination
69 basename of the source with ``-hg`` appended. If the destination
68 repository doesn't exist, it will be created.
70 repository doesn't exist, it will be created.
69
71
70 By default, all sources except Mercurial will use --branchsort.
72 By default, all sources except Mercurial will use --branchsort.
71 Mercurial uses --sourcesort to preserve the original revision number
73 Mercurial uses --sourcesort to preserve the original revision number
72 order. Sort modes have the following effects:
74 order. Sort modes have the following effects:
73
75
74 --branchsort convert from parent to child revision when possible,
76 --branchsort convert from parent to child revision when possible,
75 which means branches are usually converted one after
77 which means branches are usually converted one after
76 the other. It generates more compact repositories.
78 the other. It generates more compact repositories.
77
79
78 --datesort sort revisions by date. Converted repositories have
80 --datesort sort revisions by date. Converted repositories have
79 good-looking changelogs but are often an order of
81 good-looking changelogs but are often an order of
80 magnitude larger than the same ones generated by
82 magnitude larger than the same ones generated by
81 --branchsort.
83 --branchsort.
82
84
83 --sourcesort try to preserve source revisions order, only
85 --sourcesort try to preserve source revisions order, only
84 supported by Mercurial sources.
86 supported by Mercurial sources.
85
87
86 --closesort try to move closed revisions as close as possible
88 --closesort try to move closed revisions as close as possible
87 to parent branches, only supported by Mercurial
89 to parent branches, only supported by Mercurial
88 sources.
90 sources.
89
91
90 If ``REVMAP`` isn't given, it will be put in a default location
92 If ``REVMAP`` isn't given, it will be put in a default location
91 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
93 (``<dest>/.hg/shamap`` by default). The ``REVMAP`` is a simple
92 text file that maps each source commit ID to the destination ID
94 text file that maps each source commit ID to the destination ID
93 for that revision, like so::
95 for that revision, like so::
94
96
95 <source ID> <destination ID>
97 <source ID> <destination ID>
96
98
97 If the file doesn't exist, it's automatically created. It's
99 If the file doesn't exist, it's automatically created. It's
98 updated on each commit copied, so :hg:`convert` can be interrupted
100 updated on each commit copied, so :hg:`convert` can be interrupted
99 and can be run repeatedly to copy new commits.
101 and can be run repeatedly to copy new commits.
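
For illustration, entries for a Subversion source typically take a form
like the following (the identifiers below are placeholders, not real hashes)::

    svn:<repo uuid>/trunk@1234 f1d2d2f924e986ac86fdf7b36c94bcdf32beec15
    svn:<repo uuid>/trunk@1235 e242ed3bffccdf271b7fbaf34ed72d089537b42f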
100
102
101 The authormap is a simple text file that maps each source commit
103 The authormap is a simple text file that maps each source commit
102 author to a destination commit author. It is handy for source SCMs
104 author to a destination commit author. It is handy for source SCMs
103 that use unix logins to identify authors (e.g.: CVS). One line per
105 that use unix logins to identify authors (e.g.: CVS). One line per
104 author mapping and the line format is::
106 author mapping and the line format is::
105
107
106 source author = destination author
108 source author = destination author
107
109
108 Empty lines and lines starting with a ``#`` are ignored.
110 Empty lines and lines starting with a ``#`` are ignored.
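
For example, an authormap for a CVS conversion could contain entries like
these (names and addresses are invented for the example)::

    # map unix logins to full identities
    jdoe = John Doe <jdoe@example.com>
    msmith = Mary Smith <mary.smith@example.com>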
109
111
110 The filemap is a file that allows filtering and remapping of files
112 The filemap is a file that allows filtering and remapping of files
111 and directories. Each line can contain one of the following
113 and directories. Each line can contain one of the following
112 directives::
114 directives::
113
115
114 include path/to/file-or-dir
116 include path/to/file-or-dir
115
117
116 exclude path/to/file-or-dir
118 exclude path/to/file-or-dir
117
119
118 rename path/to/source path/to/destination
120 rename path/to/source path/to/destination
119
121
120 Comment lines start with ``#``. A specified path matches if it
122 Comment lines start with ``#``. A specified path matches if it
121 equals the full relative name of a file or one of its parent
123 equals the full relative name of a file or one of its parent
122 directories. The ``include`` or ``exclude`` directive with the
124 directories. The ``include`` or ``exclude`` directive with the
123 longest matching path applies, so line order does not matter.
125 longest matching path applies, so line order does not matter.
124
126
125 The ``include`` directive causes a file, or all files under a
127 The ``include`` directive causes a file, or all files under a
126 directory, to be included in the destination repository. The default
128 directory, to be included in the destination repository. The default
127 if there are no ``include`` statements is to include everything.
129 if there are no ``include`` statements is to include everything.
128 If there are any ``include`` statements, nothing else is included.
130 If there are any ``include`` statements, nothing else is included.
129 The ``exclude`` directive causes files or directories to
131 The ``exclude`` directive causes files or directories to
130 be omitted. The ``rename`` directive renames a file or directory if
132 be omitted. The ``rename`` directive renames a file or directory if
131 it is converted. To rename from a subdirectory into the root of
133 it is converted. To rename from a subdirectory into the root of
132 the repository, use ``.`` as the path to rename to.
134 the repository, use ``.`` as the path to rename to.
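
As a small worked example, a filemap that keeps only a library directory,
drops its tests and hoists the library to the repository root could read
(paths are invented)::

    include lib
    exclude lib/tests
    rename lib .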
133
135
136 ``--full`` will make sure the converted changesets contain exactly
137 the right files with the right content. It will make a full
138 conversion of all files, not just the ones that have
139 changed. Files that are already correct will not be changed. This
140 can be used to apply filemap changes when converting
141 incrementally. This is currently only supported for Mercurial and
142 Subversion.
143
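For example, after tightening an ``exclude`` rule in an existing filemap,
an incremental conversion can be re-run with the new flag so that newly
converted changesets reflect the change (paths are placeholders)::

    hg convert --filemap filemap.txt --full source-repo converted-hg
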
134 The splicemap is a file that allows insertion of synthetic
144 The splicemap is a file that allows insertion of synthetic
135 history, letting you specify the parents of a revision. This is
145 history, letting you specify the parents of a revision. This is
136 useful if you want to e.g. give a Subversion merge two parents, or
146 useful if you want to e.g. give a Subversion merge two parents, or
137 graft two disconnected series of history together. Each entry
147 graft two disconnected series of history together. Each entry
138 contains a key, followed by a space, followed by one or two
148 contains a key, followed by a space, followed by one or two
139 comma-separated values::
149 comma-separated values::
140
150
141 key parent1, parent2
151 key parent1, parent2
142
152
143 The key is the revision ID in the source
153 The key is the revision ID in the source
144 revision control system whose parents should be modified (same
154 revision control system whose parents should be modified (same
145 format as a key in .hg/shamap). The values are the revision IDs
155 format as a key in .hg/shamap). The values are the revision IDs
146 (in either the source or destination revision control system) that
156 (in either the source or destination revision control system) that
147 should be used as the new parents for that node. For example, if
157 should be used as the new parents for that node. For example, if
148 you have merged "release-1.0" into "trunk", then you should
158 you have merged "release-1.0" into "trunk", then you should
149 specify the revision on "trunk" as the first parent and the one on
159 specify the revision on "trunk" as the first parent and the one on
150 the "release-1.0" branch as the second.
160 the "release-1.0" branch as the second.
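
As a sketch, a single splicemap entry that gives a converted Subversion
merge revision a second parent could look like this (all identifiers are
placeholders; use your source's key format and real changeset IDs)::

    svn:<repo uuid>/trunk@1500 <trunk parent ID>, <release-1.0 head ID>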
151
161
152 The branchmap is a file that allows you to rename a branch when it is
162 The branchmap is a file that allows you to rename a branch when it is
153 being brought in from whatever external repository. When used in
163 being brought in from whatever external repository. When used in
154 conjunction with a splicemap, it allows for a powerful combination
164 conjunction with a splicemap, it allows for a powerful combination
155 to help fix even the most badly mismanaged repositories and turn them
165 to help fix even the most badly mismanaged repositories and turn them
156 into nicely structured Mercurial repositories. The branchmap contains
166 into nicely structured Mercurial repositories. The branchmap contains
157 lines of the form::
167 lines of the form::
158
168
159 original_branch_name new_branch_name
169 original_branch_name new_branch_name
160
170
161 where "original_branch_name" is the name of the branch in the
171 where "original_branch_name" is the name of the branch in the
162 source repository, and "new_branch_name" is the name of the branch
172 source repository, and "new_branch_name" is the name of the branch
163 in the destination repository. No whitespace is allowed in the
173 in the destination repository. No whitespace is allowed in the
164 branch names. This can be used to (for instance) move code in one
174 branch names. This can be used to (for instance) move code in one
165 repository from "default" to a named branch.
175 repository from "default" to a named branch.
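
For instance, to move everything that was on the source's "default" branch
onto a named branch in the converted repository (the branch name is chosen
only for the example)::

    default legacy-import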
166
176
167 Mercurial Source
177 Mercurial Source
168 ################
178 ################
169
179
170 The Mercurial source recognizes the following configuration
180 The Mercurial source recognizes the following configuration
171 options, which you can set on the command line with ``--config``:
181 options, which you can set on the command line with ``--config``:
172
182
173 :convert.hg.ignoreerrors: ignore integrity errors when reading.
183 :convert.hg.ignoreerrors: ignore integrity errors when reading.
174 Use it to fix Mercurial repositories with missing revlogs, by
184 Use it to fix Mercurial repositories with missing revlogs, by
175 converting from and to Mercurial. Default is False.
185 converting from and to Mercurial. Default is False.
176
186
177 :convert.hg.saverev: store original revision ID in changeset
187 :convert.hg.saverev: store original revision ID in changeset
178 (forces target IDs to change). It takes a boolean argument and
188 (forces target IDs to change). It takes a boolean argument and
179 defaults to False.
189 defaults to False.
180
190
181 :convert.hg.revs: revset specifying the source revisions to convert.
191 :convert.hg.revs: revset specifying the source revisions to convert.
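
These options can be combined on a single command line; as a sketch
(repository names and the revset are placeholders)::

    hg convert --config convert.hg.saverev=True \
               --config convert.hg.revs='::stable' old-repo new-repo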
182
192
183 CVS Source
193 CVS Source
184 ##########
194 ##########
185
195
186 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
196 CVS source will use a sandbox (i.e. a checked-out copy) from CVS
187 to indicate the starting point of what will be converted. Direct
197 to indicate the starting point of what will be converted. Direct
188 access to the repository files is not needed, unless of course the
198 access to the repository files is not needed, unless of course the
189 repository is ``:local:``. The conversion uses the top level
199 repository is ``:local:``. The conversion uses the top level
190 directory in the sandbox to find the CVS repository, and then uses
200 directory in the sandbox to find the CVS repository, and then uses
191 CVS rlog commands to find files to convert. This means that unless
201 CVS rlog commands to find files to convert. This means that unless
192 a filemap is given, all files under the starting directory will be
202 a filemap is given, all files under the starting directory will be
193 converted, and that any directory reorganization in the CVS
203 converted, and that any directory reorganization in the CVS
194 sandbox is ignored.
204 sandbox is ignored.
195
205
196 The following options can be used with ``--config``:
206 The following options can be used with ``--config``:
197
207
198 :convert.cvsps.cache: Set to False to disable remote log caching,
208 :convert.cvsps.cache: Set to False to disable remote log caching,
199 for testing and debugging purposes. Default is True.
209 for testing and debugging purposes. Default is True.
200
210
201 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
211 :convert.cvsps.fuzz: Specify the maximum time (in seconds) that is
202 allowed between commits with identical user and log message in
212 allowed between commits with identical user and log message in
203 a single changeset. When very large files were checked in as
213 a single changeset. When very large files were checked in as
204 part of a changeset then the default may not be long enough.
214 part of a changeset then the default may not be long enough.
205 The default is 60.
215 The default is 60.
206
216
207 :convert.cvsps.mergeto: Specify a regular expression to which
217 :convert.cvsps.mergeto: Specify a regular expression to which
208 commit log messages are matched. If a match occurs, then the
218 commit log messages are matched. If a match occurs, then the
209 conversion process will insert a dummy revision merging the
219 conversion process will insert a dummy revision merging the
210 branch on which this log message occurs to the branch
220 branch on which this log message occurs to the branch
211 indicated in the regex. Default is ``{{mergetobranch
221 indicated in the regex. Default is ``{{mergetobranch
212 ([-\\w]+)}}``
222 ([-\\w]+)}}``
213
223
214 :convert.cvsps.mergefrom: Specify a regular expression to which
224 :convert.cvsps.mergefrom: Specify a regular expression to which
215 commit log messages are matched. If a match occurs, then the
225 commit log messages are matched. If a match occurs, then the
216 conversion process will add the most recent revision on the
226 conversion process will add the most recent revision on the
217 branch indicated in the regex as the second parent of the
227 branch indicated in the regex as the second parent of the
218 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
228 changeset. Default is ``{{mergefrombranch ([-\\w]+)}}``
219
229
220 :convert.localtimezone: use local time (as determined by the TZ
230 :convert.localtimezone: use local time (as determined by the TZ
221 environment variable) for changeset date/times. The default
231 environment variable) for changeset date/times. The default
222 is False (use UTC).
232 is False (use UTC).
223
233
224 :hooks.cvslog: Specify a Python function to be called at the end of
234 :hooks.cvslog: Specify a Python function to be called at the end of
225 gathering the CVS log. The function is passed a list with the
235 gathering the CVS log. The function is passed a list with the
226 log entries, and can modify the entries in-place, or add or
236 log entries, and can modify the entries in-place, or add or
227 delete them.
237 delete them.
228
238
229 :hooks.cvschangesets: Specify a Python function to be called after
239 :hooks.cvschangesets: Specify a Python function to be called after
230 the changesets are calculated from the CVS log. The
240 the changesets are calculated from the CVS log. The
231 function is passed a list with the changeset entries, and can
241 function is passed a list with the changeset entries, and can
232 modify the changesets in-place, or add or delete them.
242 modify the changesets in-place, or add or delete them.
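
A hedged sketch of such a hook, assuming it is registered as a Python hook
(``hooks.cvslog = python:myhooks.cvslog``), that the log entries arrive as
the ``log`` keyword argument, and that each entry exposes the file name as
``.file`` (the module name and filtering rule are invented)::

    # myhooks.py
    def cvslog(ui, repo, hooktype, log=None, **kwargs):
        # drop log entries for files that should never be converted
        log[:] = [e for e in log if not e.file.startswith('Attic/')]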
233
243
234 An additional "debugcvsps" Mercurial command allows the builtin
244 An additional "debugcvsps" Mercurial command allows the builtin
235 changeset merging code to be run without doing a conversion. Its
245 changeset merging code to be run without doing a conversion. Its
236 parameters and output are similar to those of cvsps 2.1. Please see
246 parameters and output are similar to those of cvsps 2.1. Please see
237 the command help for more details.
247 the command help for more details.
238
248
239 Subversion Source
249 Subversion Source
240 #################
250 #################
241
251
242 Subversion source detects classical trunk/branches/tags layouts.
252 Subversion source detects classical trunk/branches/tags layouts.
243 By default, the supplied ``svn://repo/path/`` source URL is
253 By default, the supplied ``svn://repo/path/`` source URL is
244 converted as a single branch. If ``svn://repo/path/trunk`` exists
254 converted as a single branch. If ``svn://repo/path/trunk`` exists
245 it replaces the default branch. If ``svn://repo/path/branches``
255 it replaces the default branch. If ``svn://repo/path/branches``
246 exists, its subdirectories are listed as possible branches. If
256 exists, its subdirectories are listed as possible branches. If
247 ``svn://repo/path/tags`` exists, it is searched for tags referencing
257 ``svn://repo/path/tags`` exists, it is searched for tags referencing
248 converted branches. Default ``trunk``, ``branches`` and ``tags``
258 converted branches. Default ``trunk``, ``branches`` and ``tags``
249 values can be overridden with the following options. Set them to paths
259 values can be overridden with the following options. Set them to paths
250 relative to the source URL, or leave them blank to disable auto
260 relative to the source URL, or leave them blank to disable auto
251 detection.
261 detection.
252
262
253 The following options can be set with ``--config``:
263 The following options can be set with ``--config``:
254
264
255 :convert.svn.branches: specify the directory containing branches.
265 :convert.svn.branches: specify the directory containing branches.
256 The default is ``branches``.
266 The default is ``branches``.
257
267
258 :convert.svn.tags: specify the directory containing tags. The
268 :convert.svn.tags: specify the directory containing tags. The
259 default is ``tags``.
269 default is ``tags``.
260
270
261 :convert.svn.trunk: specify the name of the trunk branch. The
271 :convert.svn.trunk: specify the name of the trunk branch. The
262 default is ``trunk``.
272 default is ``trunk``.
263
273
264 :convert.localtimezone: use local time (as determined by the TZ
274 :convert.localtimezone: use local time (as determined by the TZ
265 environment variable) for changeset date/times. The default
275 environment variable) for changeset date/times. The default
266 is False (use UTC).
276 is False (use UTC).
267
277
268 Source history can be retrieved starting at a specific revision,
278 Source history can be retrieved starting at a specific revision,
269 instead of being converted in its entirety. Only single branch
279 instead of being converted in its entirety. Only single branch
270 conversions are supported.
280 conversions are supported.
271
281
272 :convert.svn.startrev: specify start Subversion revision number.
282 :convert.svn.startrev: specify start Subversion revision number.
273 The default is 0.
283 The default is 0.
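
Combining a few of these, a single-branch conversion that starts at a later
revision might be invoked like this (URL, layout name and revision number
are placeholders)::

    hg convert --config convert.svn.trunk=main \
               --config convert.svn.startrev=1500 \
               svn://example.org/repo/project project-hg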
274
284
275 Perforce Source
285 Perforce Source
276 ###############
286 ###############
277
287
278 The Perforce (P4) importer can be given a p4 depot path or a
288 The Perforce (P4) importer can be given a p4 depot path or a
279 client specification as source. It will convert all files in the
289 client specification as source. It will convert all files in the
280 source to a flat Mercurial repository, ignoring labels, branches
290 source to a flat Mercurial repository, ignoring labels, branches
281 and integrations. Note that when a depot path is given, you should
291 and integrations. Note that when a depot path is given, you should
282 usually also specify a target directory, because otherwise the
292 usually also specify a target directory, because otherwise the
283 target may be named ``...-hg``.
293 target may be named ``...-hg``.
284
294
285 It is possible to limit the amount of source history to be
295 It is possible to limit the amount of source history to be
286 converted by specifying an initial Perforce revision:
296 converted by specifying an initial Perforce revision:
287
297
288 :convert.p4.startrev: specify initial Perforce revision (a
298 :convert.p4.startrev: specify initial Perforce revision (a
289 Perforce changelist number).
299 Perforce changelist number).
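
For example (depot path and changelist number are placeholders)::

    hg convert --config convert.p4.startrev=2000 //depot/project/... project-hg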
290
300
291 Mercurial Destination
301 Mercurial Destination
292 #####################
302 #####################
293
303
294 The following options are supported:
304 The following options are supported:
295
305
296 :convert.hg.clonebranches: dispatch source branches in separate
306 :convert.hg.clonebranches: dispatch source branches in separate
297 clones. The default is False.
307 clones. The default is False.
298
308
299 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
309 :convert.hg.tagsbranch: branch name for tag revisions, defaults to
300 ``default``.
310 ``default``.
301
311
302 :convert.hg.usebranchnames: preserve branch names. The default is
312 :convert.hg.usebranchnames: preserve branch names. The default is
303 True.
313 True.
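
For instance, to keep tag commits off the ``default`` branch while still
preserving source branch names (the branch name is arbitrary)::

    hg convert --config convert.hg.tagsbranch=conversion-tags src-repo src-hg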
304 """
314 """
305 return convcmd.convert(ui, src, dest, revmapfile, **opts)
315 return convcmd.convert(ui, src, dest, revmapfile, **opts)
306
316
307 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
317 @command('debugsvnlog', [], 'hg debugsvnlog', norepo=True)
308 def debugsvnlog(ui, **opts):
318 def debugsvnlog(ui, **opts):
309 return subversion.debugsvnlog(ui, **opts)
319 return subversion.debugsvnlog(ui, **opts)
310
320
311 @command('debugcvsps',
321 @command('debugcvsps',
312 [
322 [
313 # Main options shared with cvsps-2.1
323 # Main options shared with cvsps-2.1
314 ('b', 'branches', [], _('only return changes on specified branches')),
324 ('b', 'branches', [], _('only return changes on specified branches')),
315 ('p', 'prefix', '', _('prefix to remove from file names')),
325 ('p', 'prefix', '', _('prefix to remove from file names')),
316 ('r', 'revisions', [],
326 ('r', 'revisions', [],
317 _('only return changes after or between specified tags')),
327 _('only return changes after or between specified tags')),
318 ('u', 'update-cache', None, _("update cvs log cache")),
328 ('u', 'update-cache', None, _("update cvs log cache")),
319 ('x', 'new-cache', None, _("create new cvs log cache")),
329 ('x', 'new-cache', None, _("create new cvs log cache")),
320 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
330 ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
321 ('', 'root', '', _('specify cvsroot')),
331 ('', 'root', '', _('specify cvsroot')),
322 # Options specific to builtin cvsps
332 # Options specific to builtin cvsps
323 ('', 'parents', '', _('show parent changesets')),
333 ('', 'parents', '', _('show parent changesets')),
324 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
334 ('', 'ancestors', '', _('show current changeset in ancestor branches')),
325 # Options that are ignored for compatibility with cvsps-2.1
335 # Options that are ignored for compatibility with cvsps-2.1
326 ('A', 'cvs-direct', None, _('ignored for compatibility')),
336 ('A', 'cvs-direct', None, _('ignored for compatibility')),
327 ],
337 ],
328 _('hg debugcvsps [OPTION]... [PATH]...'),
338 _('hg debugcvsps [OPTION]... [PATH]...'),
329 norepo=True)
339 norepo=True)
330 def debugcvsps(ui, *args, **opts):
340 def debugcvsps(ui, *args, **opts):
331 '''create changeset information from CVS
341 '''create changeset information from CVS
332
342
333 This command is intended as a debugging tool for the CVS to
343 This command is intended as a debugging tool for the CVS to
334 Mercurial converter, and can be used as a direct replacement for
344 Mercurial converter, and can be used as a direct replacement for
335 cvsps.
345 cvsps.
336
346
337 Hg debugcvsps reads the CVS rlog for the current directory (or any
347 Hg debugcvsps reads the CVS rlog for the current directory (or any
338 named directory) in the CVS repository, and converts the log to a
348 named directory) in the CVS repository, and converts the log to a
339 series of changesets based on matching commit log entries and
349 series of changesets based on matching commit log entries and
340 dates.'''
350 dates.'''
341 return cvsps.debugcvsps(ui, *args, **opts)
351 return cvsps.debugcvsps(ui, *args, **opts)
342
352
343 def kwconverted(ctx, name):
353 def kwconverted(ctx, name):
344 rev = ctx.extra().get('convert_revision', '')
354 rev = ctx.extra().get('convert_revision', '')
345 if rev.startswith('svn:'):
355 if rev.startswith('svn:'):
346 if name == 'svnrev':
356 if name == 'svnrev':
347 return str(subversion.revsplit(rev)[2])
357 return str(subversion.revsplit(rev)[2])
348 elif name == 'svnpath':
358 elif name == 'svnpath':
349 return subversion.revsplit(rev)[1]
359 return subversion.revsplit(rev)[1]
350 elif name == 'svnuuid':
360 elif name == 'svnuuid':
351 return subversion.revsplit(rev)[0]
361 return subversion.revsplit(rev)[0]
352 return rev
362 return rev
353
363
354 def kwsvnrev(repo, ctx, **args):
364 def kwsvnrev(repo, ctx, **args):
355 """:svnrev: String. Converted subversion revision number."""
365 """:svnrev: String. Converted subversion revision number."""
356 return kwconverted(ctx, 'svnrev')
366 return kwconverted(ctx, 'svnrev')
357
367
358 def kwsvnpath(repo, ctx, **args):
368 def kwsvnpath(repo, ctx, **args):
359 """:svnpath: String. Converted subversion revision project path."""
369 """:svnpath: String. Converted subversion revision project path."""
360 return kwconverted(ctx, 'svnpath')
370 return kwconverted(ctx, 'svnpath')
361
371
362 def kwsvnuuid(repo, ctx, **args):
372 def kwsvnuuid(repo, ctx, **args):
363 """:svnuuid: String. Converted subversion revision repository identifier."""
373 """:svnuuid: String. Converted subversion revision repository identifier."""
364 return kwconverted(ctx, 'svnuuid')
374 return kwconverted(ctx, 'svnuuid')
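
# Example use of the keywords registered below, once the extension is
# enabled (the revision selection and template are arbitrary):
#
#   hg log -r . --template '{rev}:{node|short} svn r{svnrev} at {svnpath}\n'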
365
375
366 def extsetup(ui):
376 def extsetup(ui):
367 templatekw.keywords['svnrev'] = kwsvnrev
377 templatekw.keywords['svnrev'] = kwsvnrev
368 templatekw.keywords['svnpath'] = kwsvnpath
378 templatekw.keywords['svnpath'] = kwsvnpath
369 templatekw.keywords['svnuuid'] = kwsvnuuid
379 templatekw.keywords['svnuuid'] = kwsvnuuid
370
380
371 # tell hggettext to extract docstrings from these functions:
381 # tell hggettext to extract docstrings from these functions:
372 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
382 i18nfunctions = [kwsvnrev, kwsvnpath, kwsvnuuid]
@@ -1,284 +1,285 b''
1 # bzr.py - bzr support for the convert extension
1 # bzr.py - bzr support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
3 # Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # This module is for handling 'bzr', that was formerly known as Bazaar-NG;
8 # This module is for handling 'bzr', that was formerly known as Bazaar-NG;
9 # it cannot access 'bar' repositories, but they were never used very much
9 # it cannot access 'bar' repositories, but they were never used very much
10
10
11 import os
11 import os
12 from mercurial import demandimport
12 from mercurial import demandimport
13 # these do not work with demandimport, blacklist
13 # these do not work with demandimport, blacklist
14 demandimport.ignore.extend([
14 demandimport.ignore.extend([
15 'bzrlib.transactions',
15 'bzrlib.transactions',
16 'bzrlib.urlutils',
16 'bzrlib.urlutils',
17 'ElementPath',
17 'ElementPath',
18 ])
18 ])
19
19
20 from mercurial.i18n import _
20 from mercurial.i18n import _
21 from mercurial import util
21 from mercurial import util
22 from common import NoRepo, commit, converter_source
22 from common import NoRepo, commit, converter_source
23
23
24 try:
24 try:
25 # bazaar imports
25 # bazaar imports
26 from bzrlib import bzrdir, revision, errors
26 from bzrlib import bzrdir, revision, errors
27 from bzrlib.revisionspec import RevisionSpec
27 from bzrlib.revisionspec import RevisionSpec
28 except ImportError:
28 except ImportError:
29 pass
29 pass
30
30
31 supportedkinds = ('file', 'symlink')
31 supportedkinds = ('file', 'symlink')
32
32
33 class bzr_source(converter_source):
33 class bzr_source(converter_source):
34 """Reads Bazaar repositories by using the Bazaar Python libraries"""
34 """Reads Bazaar repositories by using the Bazaar Python libraries"""
35
35
36 def __init__(self, ui, path, rev=None):
36 def __init__(self, ui, path, rev=None):
37 super(bzr_source, self).__init__(ui, path, rev=rev)
37 super(bzr_source, self).__init__(ui, path, rev=rev)
38
38
39 if not os.path.exists(os.path.join(path, '.bzr')):
39 if not os.path.exists(os.path.join(path, '.bzr')):
40 raise NoRepo(_('%s does not look like a Bazaar repository')
40 raise NoRepo(_('%s does not look like a Bazaar repository')
41 % path)
41 % path)
42
42
43 try:
43 try:
44 # access bzrlib stuff
44 # access bzrlib stuff
45 bzrdir
45 bzrdir
46 except NameError:
46 except NameError:
47 raise NoRepo(_('Bazaar modules could not be loaded'))
47 raise NoRepo(_('Bazaar modules could not be loaded'))
48
48
49 path = os.path.abspath(path)
49 path = os.path.abspath(path)
50 self._checkrepotype(path)
50 self._checkrepotype(path)
51 try:
51 try:
52 self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
52 self.sourcerepo = bzrdir.BzrDir.open(path).open_repository()
53 except errors.NoRepositoryPresent:
53 except errors.NoRepositoryPresent:
54 raise NoRepo(_('%s does not look like a Bazaar repository')
54 raise NoRepo(_('%s does not look like a Bazaar repository')
55 % path)
55 % path)
56 self._parentids = {}
56 self._parentids = {}
57
57
58 def _checkrepotype(self, path):
58 def _checkrepotype(self, path):
59 # Lightweight checkouts detection is informational but probably
59 # Lightweight checkouts detection is informational but probably
60 # fragile at API level. It should not terminate the conversion.
60 # fragile at API level. It should not terminate the conversion.
61 try:
61 try:
62 from bzrlib import bzrdir
62 from bzrlib import bzrdir
63 dir = bzrdir.BzrDir.open_containing(path)[0]
63 dir = bzrdir.BzrDir.open_containing(path)[0]
64 try:
64 try:
65 tree = dir.open_workingtree(recommend_upgrade=False)
65 tree = dir.open_workingtree(recommend_upgrade=False)
66 branch = tree.branch
66 branch = tree.branch
67 except (errors.NoWorkingTree, errors.NotLocalUrl):
67 except (errors.NoWorkingTree, errors.NotLocalUrl):
68 tree = None
68 tree = None
69 branch = dir.open_branch()
69 branch = dir.open_branch()
70 if (tree is not None and tree.bzrdir.root_transport.base !=
70 if (tree is not None and tree.bzrdir.root_transport.base !=
71 branch.bzrdir.root_transport.base):
71 branch.bzrdir.root_transport.base):
72 self.ui.warn(_('warning: lightweight checkouts may cause '
72 self.ui.warn(_('warning: lightweight checkouts may cause '
73 'conversion failures, try with a regular '
73 'conversion failures, try with a regular '
74 'branch instead.\n'))
74 'branch instead.\n'))
75 except Exception:
75 except Exception:
76 self.ui.note(_('bzr source type could not be determined\n'))
76 self.ui.note(_('bzr source type could not be determined\n'))
77
77
78 def before(self):
78 def before(self):
79 """Before the conversion begins, acquire a read lock
79 """Before the conversion begins, acquire a read lock
80 for all the operations that might need it. Fortunately
80 for all the operations that might need it. Fortunately
81 read locks don't block other reads or writes to the
81 read locks don't block other reads or writes to the
82 repository, so this shouldn't have any impact on the usage of
82 repository, so this shouldn't have any impact on the usage of
83 the source repository.
83 the source repository.
84
84
85 The alternative would be locking on every operation that
85 The alternative would be locking on every operation that
86 needs locks (there are currently two: getting the file and
86 needs locks (there are currently two: getting the file and
87 getting the parent map) and releasing immediately after,
87 getting the parent map) and releasing immediately after,
88 but this approach can take up to 40% longer."""
88 but this approach can take up to 40% longer."""
89 self.sourcerepo.lock_read()
89 self.sourcerepo.lock_read()
90
90
91 def after(self):
91 def after(self):
92 self.sourcerepo.unlock()
92 self.sourcerepo.unlock()
93
93
94 def _bzrbranches(self):
94 def _bzrbranches(self):
95 return self.sourcerepo.find_branches(using=True)
95 return self.sourcerepo.find_branches(using=True)
96
96
97 def getheads(self):
97 def getheads(self):
98 if not self.rev:
98 if not self.rev:
99 # Set using=True to avoid nested repositories (see issue3254)
99 # Set using=True to avoid nested repositories (see issue3254)
100 heads = sorted([b.last_revision() for b in self._bzrbranches()])
100 heads = sorted([b.last_revision() for b in self._bzrbranches()])
101 else:
101 else:
102 revid = None
102 revid = None
103 for branch in self._bzrbranches():
103 for branch in self._bzrbranches():
104 try:
104 try:
105 r = RevisionSpec.from_string(self.rev)
105 r = RevisionSpec.from_string(self.rev)
106 info = r.in_history(branch)
106 info = r.in_history(branch)
107 except errors.BzrError:
107 except errors.BzrError:
108 pass
108 pass
109 revid = info.rev_id
109 revid = info.rev_id
110 if revid is None:
110 if revid is None:
111 raise util.Abort(_('%s is not a valid revision') % self.rev)
111 raise util.Abort(_('%s is not a valid revision') % self.rev)
112 heads = [revid]
112 heads = [revid]
113 # Empty repositories return 'null:', which cannot be retrieved
113 # Empty repositories return 'null:', which cannot be retrieved
114 heads = [h for h in heads if h != 'null:']
114 heads = [h for h in heads if h != 'null:']
115 return heads
115 return heads
116
116
117 def getfile(self, name, rev):
117 def getfile(self, name, rev):
118 revtree = self.sourcerepo.revision_tree(rev)
118 revtree = self.sourcerepo.revision_tree(rev)
119 fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
119 fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
120 kind = None
120 kind = None
121 if fileid is not None:
121 if fileid is not None:
122 kind = revtree.kind(fileid)
122 kind = revtree.kind(fileid)
123 if kind not in supportedkinds:
123 if kind not in supportedkinds:
124 # the file is not available anymore - was deleted
124 # the file is not available anymore - was deleted
125 return None, None
125 return None, None
126 mode = self._modecache[(name, rev)]
126 mode = self._modecache[(name, rev)]
127 if kind == 'symlink':
127 if kind == 'symlink':
128 target = revtree.get_symlink_target(fileid)
128 target = revtree.get_symlink_target(fileid)
129 if target is None:
129 if target is None:
130 raise util.Abort(_('%s.%s symlink has no target')
130 raise util.Abort(_('%s.%s symlink has no target')
131 % (name, rev))
131 % (name, rev))
132 return target, mode
132 return target, mode
133 else:
133 else:
134 sio = revtree.get_file(fileid)
134 sio = revtree.get_file(fileid)
135 return sio.read(), mode
135 return sio.read(), mode
136
136
137 def getchanges(self, version):
137 def getchanges(self, version, full):
138 # set up caches: modecache and revtree
138 if full:
139 raise util.Abort(_("convert from bzr does not support --full"))
139 self._modecache = {}
140 self._modecache = {}
140 self._revtree = self.sourcerepo.revision_tree(version)
141 self._revtree = self.sourcerepo.revision_tree(version)
141 # get the parentids from the cache
142 # get the parentids from the cache
142 parentids = self._parentids.pop(version)
143 parentids = self._parentids.pop(version)
143 # only diff against first parent id
144 # only diff against first parent id
144 prevtree = self.sourcerepo.revision_tree(parentids[0])
145 prevtree = self.sourcerepo.revision_tree(parentids[0])
145 return self._gettreechanges(self._revtree, prevtree)
146 return self._gettreechanges(self._revtree, prevtree)
146
147
147 def getcommit(self, version):
148 def getcommit(self, version):
148 rev = self.sourcerepo.get_revision(version)
149 rev = self.sourcerepo.get_revision(version)
149 # populate parent id cache
150 # populate parent id cache
150 if not rev.parent_ids:
151 if not rev.parent_ids:
151 parents = []
152 parents = []
152 self._parentids[version] = (revision.NULL_REVISION,)
153 self._parentids[version] = (revision.NULL_REVISION,)
153 else:
154 else:
154 parents = self._filterghosts(rev.parent_ids)
155 parents = self._filterghosts(rev.parent_ids)
155 self._parentids[version] = parents
156 self._parentids[version] = parents
156
157
157 branch = self.recode(rev.properties.get('branch-nick', u'default'))
158 branch = self.recode(rev.properties.get('branch-nick', u'default'))
158 if branch == 'trunk':
159 if branch == 'trunk':
159 branch = 'default'
160 branch = 'default'
160 return commit(parents=parents,
161 return commit(parents=parents,
161 date='%d %d' % (rev.timestamp, -rev.timezone),
162 date='%d %d' % (rev.timestamp, -rev.timezone),
162 author=self.recode(rev.committer),
163 author=self.recode(rev.committer),
163 desc=self.recode(rev.message),
164 desc=self.recode(rev.message),
164 branch=branch,
165 branch=branch,
165 rev=version)
166 rev=version)
166
167
167 def gettags(self):
168 def gettags(self):
168 bytetags = {}
169 bytetags = {}
169 for branch in self._bzrbranches():
170 for branch in self._bzrbranches():
170 if not branch.supports_tags():
171 if not branch.supports_tags():
171 return {}
172 return {}
172 tagdict = branch.tags.get_tag_dict()
173 tagdict = branch.tags.get_tag_dict()
173 for name, rev in tagdict.iteritems():
174 for name, rev in tagdict.iteritems():
174 bytetags[self.recode(name)] = rev
175 bytetags[self.recode(name)] = rev
175 return bytetags
176 return bytetags
176
177
177 def getchangedfiles(self, rev, i):
178 def getchangedfiles(self, rev, i):
178 self._modecache = {}
179 self._modecache = {}
179 curtree = self.sourcerepo.revision_tree(rev)
180 curtree = self.sourcerepo.revision_tree(rev)
180 if i is not None:
181 if i is not None:
181 parentid = self._parentids[rev][i]
182 parentid = self._parentids[rev][i]
182 else:
183 else:
183 # no parent id, get the empty revision
184 # no parent id, get the empty revision
184 parentid = revision.NULL_REVISION
185 parentid = revision.NULL_REVISION
185
186
186 prevtree = self.sourcerepo.revision_tree(parentid)
187 prevtree = self.sourcerepo.revision_tree(parentid)
187 changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
188 changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
188 return changes
189 return changes
189
190
190 def _gettreechanges(self, current, origin):
191 def _gettreechanges(self, current, origin):
191 revid = current._revision_id
192 revid = current._revision_id
192 changes = []
193 changes = []
193 renames = {}
194 renames = {}
194 seen = set()
195 seen = set()
195 # Process the entries by reverse lexicographic name order to
196 # Process the entries by reverse lexicographic name order to
196 # handle nested renames correctly, most specific first.
197 # handle nested renames correctly, most specific first.
197 curchanges = sorted(current.iter_changes(origin),
198 curchanges = sorted(current.iter_changes(origin),
198 key=lambda c: c[1][0] or c[1][1],
199 key=lambda c: c[1][0] or c[1][1],
199 reverse=True)
200 reverse=True)
200 for (fileid, paths, changed_content, versioned, parent, name,
201 for (fileid, paths, changed_content, versioned, parent, name,
201 kind, executable) in curchanges:
202 kind, executable) in curchanges:
202
203
203 if paths[0] == u'' or paths[1] == u'':
204 if paths[0] == u'' or paths[1] == u'':
204 # ignore changes to tree root
205 # ignore changes to tree root
205 continue
206 continue
206
207
207 # bazaar tracks directories, mercurial does not, so
208 # bazaar tracks directories, mercurial does not, so
208 # we have to rename the directory contents
209 # we have to rename the directory contents
209 if kind[1] == 'directory':
210 if kind[1] == 'directory':
210 if kind[0] not in (None, 'directory'):
211 if kind[0] not in (None, 'directory'):
211 # Replacing 'something' with a directory, record it
212 # Replacing 'something' with a directory, record it
212 # so it can be removed.
213 # so it can be removed.
213 changes.append((self.recode(paths[0]), revid))
214 changes.append((self.recode(paths[0]), revid))
214
215
215 if kind[0] == 'directory' and None not in paths:
216 if kind[0] == 'directory' and None not in paths:
216 renaming = paths[0] != paths[1]
217 renaming = paths[0] != paths[1]
217 # neither an add nor a delete - a move
218 # neither an add nor a delete - a move
218 # rename all directory contents manually
219 # rename all directory contents manually
219 subdir = origin.inventory.path2id(paths[0])
220 subdir = origin.inventory.path2id(paths[0])
220 # get all child-entries of the directory
221 # get all child-entries of the directory
221 for name, entry in origin.inventory.iter_entries(subdir):
222 for name, entry in origin.inventory.iter_entries(subdir):
222 # hg does not track directory renames
223 # hg does not track directory renames
223 if entry.kind == 'directory':
224 if entry.kind == 'directory':
224 continue
225 continue
225 frompath = self.recode(paths[0] + '/' + name)
226 frompath = self.recode(paths[0] + '/' + name)
226 if frompath in seen:
227 if frompath in seen:
227 # Already handled by a more specific change entry
228 # Already handled by a more specific change entry
228 # This is important when you have:
229 # This is important when you have:
229 # a => b
230 # a => b
230 # a/c => a/c
231 # a/c => a/c
231 # Here a/c must not be renamed into b/c
232 # Here a/c must not be renamed into b/c
232 continue
233 continue
233 seen.add(frompath)
234 seen.add(frompath)
234 if not renaming:
235 if not renaming:
235 continue
236 continue
236 topath = self.recode(paths[1] + '/' + name)
237 topath = self.recode(paths[1] + '/' + name)
237 # register the files as changed
238 # register the files as changed
238 changes.append((frompath, revid))
239 changes.append((frompath, revid))
239 changes.append((topath, revid))
240 changes.append((topath, revid))
240 # add to mode cache
241 # add to mode cache
241 mode = ((entry.executable and 'x')
242 mode = ((entry.executable and 'x')
242 or (entry.kind == 'symlink' and 's')
243 or (entry.kind == 'symlink' and 's')
243 or '')
244 or '')
244 self._modecache[(topath, revid)] = mode
245 self._modecache[(topath, revid)] = mode
245 # register the change as move
246 # register the change as move
246 renames[topath] = frompath
247 renames[topath] = frompath
247
248
248 # no further changes, go to the next change
249 # no further changes, go to the next change
249 continue
250 continue
250
251
251 # we got unicode paths, need to convert them
252 # we got unicode paths, need to convert them
252 path, topath = paths
253 path, topath = paths
253 if path is not None:
254 if path is not None:
254 path = self.recode(path)
255 path = self.recode(path)
255 if topath is not None:
256 if topath is not None:
256 topath = self.recode(topath)
257 topath = self.recode(topath)
257 seen.add(path or topath)
258 seen.add(path or topath)
258
259
259 if topath is None:
260 if topath is None:
260 # file deleted
261 # file deleted
261 changes.append((path, revid))
262 changes.append((path, revid))
262 continue
263 continue
263
264
264 # renamed
265 # renamed
265 if path and path != topath:
266 if path and path != topath:
266 renames[topath] = path
267 renames[topath] = path
267 changes.append((path, revid))
268 changes.append((path, revid))
268
269
269 # populate the mode cache
270 # populate the mode cache
270 kind, executable = [e[1] for e in (kind, executable)]
271 kind, executable = [e[1] for e in (kind, executable)]
271 mode = ((executable and 'x') or (kind == 'symlink' and 'l')
272 mode = ((executable and 'x') or (kind == 'symlink' and 'l')
272 or '')
273 or '')
273 self._modecache[(topath, revid)] = mode
274 self._modecache[(topath, revid)] = mode
274 changes.append((topath, revid))
275 changes.append((topath, revid))
275
276
276 return changes, renames
277 return changes, renames
277
278
278 def _filterghosts(self, ids):
279 def _filterghosts(self, ids):
279 """Filters out ghost revisions which hg does not support, see
280 """Filters out ghost revisions which hg does not support, see
280 <http://bazaar-vcs.org/GhostRevision>
281 <http://bazaar-vcs.org/GhostRevision>
281 """
282 """
282 parentmap = self.sourcerepo.get_parent_map(ids)
283 parentmap = self.sourcerepo.get_parent_map(ids)
283 parents = tuple([parent for parent in ids if parent in parentmap])
284 parents = tuple([parent for parent in ids if parent in parentmap])
284 return parents
285 return parents
@@ -1,450 +1,452 b''
1 # common.py - common code for the convert extension
1 # common.py - common code for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import base64, errno, subprocess, os, datetime, re
8 import base64, errno, subprocess, os, datetime, re
9 import cPickle as pickle
9 import cPickle as pickle
10 from mercurial import util
10 from mercurial import util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 propertycache = util.propertycache
13 propertycache = util.propertycache
14
14
15 def encodeargs(args):
15 def encodeargs(args):
16 def encodearg(s):
16 def encodearg(s):
17 lines = base64.encodestring(s)
17 lines = base64.encodestring(s)
18 lines = [l.splitlines()[0] for l in lines]
18 lines = [l.splitlines()[0] for l in lines]
19 return ''.join(lines)
19 return ''.join(lines)
20
20
21 s = pickle.dumps(args)
21 s = pickle.dumps(args)
22 return encodearg(s)
22 return encodearg(s)
23
23
24 def decodeargs(s):
24 def decodeargs(s):
25 s = base64.decodestring(s)
25 s = base64.decodestring(s)
26 return pickle.loads(s)
26 return pickle.loads(s)
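
# Example round-trip (the argument list is arbitrary): encodeargs pickles
# and base64-encodes its arguments into a single line suitable for a
# command line, and decodeargs reverses the transformation:
#
#   payload = encodeargs(['svn://example.org/repo', 0, 10])
#   assert decodeargs(payload) == ['svn://example.org/repo', 0, 10]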
27
27
28 class MissingTool(Exception):
28 class MissingTool(Exception):
29 pass
29 pass
30
30
31 def checktool(exe, name=None, abort=True):
31 def checktool(exe, name=None, abort=True):
32 name = name or exe
32 name = name or exe
33 if not util.findexe(exe):
33 if not util.findexe(exe):
34 exc = abort and util.Abort or MissingTool
34 exc = abort and util.Abort or MissingTool
35 raise exc(_('cannot find required "%s" tool') % name)
35 raise exc(_('cannot find required "%s" tool') % name)
36
36
37 class NoRepo(Exception):
37 class NoRepo(Exception):
38 pass
38 pass
39
39
40 SKIPREV = 'SKIP'
40 SKIPREV = 'SKIP'
41
41
42 class commit(object):
42 class commit(object):
43 def __init__(self, author, date, desc, parents, branch=None, rev=None,
43 def __init__(self, author, date, desc, parents, branch=None, rev=None,
44 extra={}, sortkey=None):
44 extra={}, sortkey=None):
45 self.author = author or 'unknown'
45 self.author = author or 'unknown'
46 self.date = date or '0 0'
46 self.date = date or '0 0'
47 self.desc = desc
47 self.desc = desc
48 self.parents = parents
48 self.parents = parents
49 self.branch = branch
49 self.branch = branch
50 self.rev = rev
50 self.rev = rev
51 self.extra = extra
51 self.extra = extra
52 self.sortkey = sortkey
52 self.sortkey = sortkey
53
53
54 class converter_source(object):
54 class converter_source(object):
55 """Conversion source interface"""
55 """Conversion source interface"""
56
56
57 def __init__(self, ui, path=None, rev=None):
57 def __init__(self, ui, path=None, rev=None):
58 """Initialize conversion source (or raise NoRepo("message")
58 """Initialize conversion source (or raise NoRepo("message")
59 exception if path is not a valid repository)"""
59 exception if path is not a valid repository)"""
60 self.ui = ui
60 self.ui = ui
61 self.path = path
61 self.path = path
62 self.rev = rev
62 self.rev = rev
63
63
64 self.encoding = 'utf-8'
64 self.encoding = 'utf-8'
65
65
66 def checkhexformat(self, revstr, mapname='splicemap'):
66 def checkhexformat(self, revstr, mapname='splicemap'):
67 """ fails if revstr is not a 40 byte hex. mercurial and git both uses
67 """ fails if revstr is not a 40 byte hex. mercurial and git both uses
68 such format for their revision numbering
68 such format for their revision numbering
69 """
69 """
70 if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
70 if not re.match(r'[0-9a-fA-F]{40,40}$', revstr):
71 raise util.Abort(_('%s entry %s is not a valid revision'
71 raise util.Abort(_('%s entry %s is not a valid revision'
72 ' identifier') % (mapname, revstr))
72 ' identifier') % (mapname, revstr))
73
73
74 def before(self):
74 def before(self):
75 pass
75 pass
76
76
77 def after(self):
77 def after(self):
78 pass
78 pass
79
79
80 def setrevmap(self, revmap):
80 def setrevmap(self, revmap):
81 """set the map of already-converted revisions"""
81 """set the map of already-converted revisions"""
82 pass
82 pass
83
83
84 def getheads(self):
84 def getheads(self):
85 """Return a list of this repository's heads"""
85 """Return a list of this repository's heads"""
86 raise NotImplementedError
86 raise NotImplementedError
87
87
88 def getfile(self, name, rev):
88 def getfile(self, name, rev):
89 """Return a pair (data, mode) where data is the file content
89 """Return a pair (data, mode) where data is the file content
90 as a string and mode one of '', 'x' or 'l'. rev is the
90 as a string and mode one of '', 'x' or 'l'. rev is the
91 identifier returned by a previous call to getchanges().
91 identifier returned by a previous call to getchanges().
92 Data is None if file is missing/deleted in rev.
92 Data is None if file is missing/deleted in rev.
93 """
93 """
94 raise NotImplementedError
94 raise NotImplementedError
95
95
96 def getchanges(self, version):
96 def getchanges(self, version, full):
97 """Returns a tuple of (files, copies).
97 """Returns a tuple of (files, copies).
98
98
99 files is a sorted list of (filename, id) tuples for all files
99 files is a sorted list of (filename, id) tuples for all files
100 changed between version and its first parent returned by
100 changed between version and its first parent returned by
101 getcommit(). id is the source revision id of the file.
101 getcommit(). If full, all files in that revision are returned.
102 id is the source revision id of the file.
102
103
103 copies is a dictionary of dest: source
104 copies is a dictionary of dest: source
104 """
105 """
105 raise NotImplementedError
106 raise NotImplementedError
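
    # A non-authoritative sketch of how a concrete source could honour the
    # new 'full' flag, assuming it has helpers for listing and diffing the
    # files of a revision (_allfiles and _changedfiles are invented names):
    #
    #   def getchanges(self, version, full):
    #       if full:
    #           # hand every file to the sink so it can rewrite them all
    #           files = [(f, version) for f in sorted(self._allfiles(version))]
    #       else:
    #           files = self._changedfiles(version)
    #       return files, {}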
106
107
107 def getcommit(self, version):
108 def getcommit(self, version):
108 """Return the commit object for version"""
109 """Return the commit object for version"""
109 raise NotImplementedError
110 raise NotImplementedError
110
111
111 def gettags(self):
112 def gettags(self):
112 """Return the tags as a dictionary of name: revision
113 """Return the tags as a dictionary of name: revision
113
114
114 Tag names must be UTF-8 strings.
115 Tag names must be UTF-8 strings.
115 """
116 """
116 raise NotImplementedError
117 raise NotImplementedError
117
118
118 def recode(self, s, encoding=None):
119 def recode(self, s, encoding=None):
119 if not encoding:
120 if not encoding:
120 encoding = self.encoding or 'utf-8'
121 encoding = self.encoding or 'utf-8'
121
122
122 if isinstance(s, unicode):
123 if isinstance(s, unicode):
123 return s.encode("utf-8")
124 return s.encode("utf-8")
124 try:
125 try:
125 return s.decode(encoding).encode("utf-8")
126 return s.decode(encoding).encode("utf-8")
126 except UnicodeError:
127 except UnicodeError:
127 try:
128 try:
128 return s.decode("latin-1").encode("utf-8")
129 return s.decode("latin-1").encode("utf-8")
129 except UnicodeError:
130 except UnicodeError:
130 return s.decode(encoding, "replace").encode("utf-8")
131 return s.decode(encoding, "replace").encode("utf-8")
131
132
132 def getchangedfiles(self, rev, i):
133 def getchangedfiles(self, rev, i):
133 """Return the files changed by rev compared to parent[i].
134 """Return the files changed by rev compared to parent[i].
134
135
135 i is an index selecting one of the parents of rev. The return
136 i is an index selecting one of the parents of rev. The return
136 value should be the list of files that are different in rev and
137 value should be the list of files that are different in rev and
137 this parent.
138 this parent.
138
139
139 If rev has no parents, i is None.
140 If rev has no parents, i is None.
140
141
141 This function is only needed to support --filemap
142 This function is only needed to support --filemap
142 """
143 """
143 raise NotImplementedError
144 raise NotImplementedError
144
145
145 def converted(self, rev, sinkrev):
146 def converted(self, rev, sinkrev):
146 '''Notify the source that a revision has been converted.'''
147 '''Notify the source that a revision has been converted.'''
147 pass
148 pass
148
149
149 def hasnativeorder(self):
150 def hasnativeorder(self):
150 """Return true if this source has a meaningful, native revision
151 """Return true if this source has a meaningful, native revision
151 order. For instance, Mercurial revisions are stored sequentially
152 order. For instance, Mercurial revisions are stored sequentially
152 while there is no such global ordering with Darcs.
153 while there is no such global ordering with Darcs.
153 """
154 """
154 return False
155 return False
155
156
156 def hasnativeclose(self):
157 def hasnativeclose(self):
157 """Return true if this source has ability to close branch.
158 """Return true if this source has ability to close branch.
158 """
159 """
159 return False
160 return False
160
161
161 def lookuprev(self, rev):
162 def lookuprev(self, rev):
162 """If rev is a meaningful revision reference in source, return
163 """If rev is a meaningful revision reference in source, return
163 the referenced identifier in the same format used by getcommit().
164 the referenced identifier in the same format used by getcommit().
164 return None otherwise.
165 return None otherwise.
165 """
166 """
166 return None
167 return None
167
168
168 def getbookmarks(self):
169 def getbookmarks(self):
169 """Return the bookmarks as a dictionary of name: revision
170 """Return the bookmarks as a dictionary of name: revision
170
171
171 Bookmark names are to be UTF-8 strings.
172 Bookmark names are to be UTF-8 strings.
172 """
173 """
173 return {}
174 return {}
174
175
175 def checkrevformat(self, revstr, mapname='splicemap'):
176 def checkrevformat(self, revstr, mapname='splicemap'):
176 """revstr is a string that describes a revision in the given
177 """revstr is a string that describes a revision in the given
177 source control system. Return true if revstr has correct
178 source control system. Return true if revstr has correct
178 format.
179 format.
179 """
180 """
180 return True
181 return True
181
182
182 class converter_sink(object):
183 class converter_sink(object):
183 """Conversion sink (target) interface"""
184 """Conversion sink (target) interface"""
184
185
185 def __init__(self, ui, path):
186 def __init__(self, ui, path):
186 """Initialize conversion sink (or raise NoRepo("message")
187 """Initialize conversion sink (or raise NoRepo("message")
187 exception if path is not a valid repository)
188 exception if path is not a valid repository)
188
189
189 created is a list of paths to remove if a fatal error occurs
190 created is a list of paths to remove if a fatal error occurs
190 later"""
191 later"""
191 self.ui = ui
192 self.ui = ui
192 self.path = path
193 self.path = path
193 self.created = []
194 self.created = []
194
195
195 def revmapfile(self):
196 def revmapfile(self):
196 """Path to a file that will contain lines
197 """Path to a file that will contain lines
197 source_rev_id sink_rev_id
198 source_rev_id sink_rev_id
198 mapping equivalent revision identifiers for each system."""
199 mapping equivalent revision identifiers for each system."""
199 raise NotImplementedError
200 raise NotImplementedError
200
201
201 def authorfile(self):
202 def authorfile(self):
202 """Path to a file that will contain lines
203 """Path to a file that will contain lines
203 srcauthor=dstauthor
204 srcauthor=dstauthor
204 mapping equivalent author identifiers for each system."""
205 mapping equivalent author identifiers for each system."""
205 return None
206 return None
206
207
207 def putcommit(self, files, copies, parents, commit, source, revmap):
208 def putcommit(self, files, copies, parents, commit, source, revmap, full):
208 """Create a revision with all changed files listed in 'files'
209 """Create a revision with all changed files listed in 'files'
209 and having listed parents. 'commit' is a commit object
210 and having listed parents. 'commit' is a commit object
210 containing at a minimum the author, date, and message for this
211 containing at a minimum the author, date, and message for this
211 changeset. 'files' is a list of (path, version) tuples,
212 changeset. 'files' is a list of (path, version) tuples,
212 'copies' is a dictionary mapping destinations to sources,
213 'copies' is a dictionary mapping destinations to sources,
213 'source' is the source repository, and 'revmap' is a mapfile
214 'source' is the source repository, and 'revmap' is a mapfile
214 of source revisions to converted revisions. Only getfile() and
215 of source revisions to converted revisions. Only getfile() and
215 lookuprev() should be called on 'source'.
216 lookuprev() should be called on 'source'. 'full' means that 'files'
217 is complete and all other files should be removed.
216
218
217 Note that the sink repository is not told to update itself to
219 Note that the sink repository is not told to update itself to
218 a particular revision (or even what that revision would be)
220 a particular revision (or even what that revision would be)
219 before it receives the file data.
221 before it receives the file data.
220 """
222 """
221 raise NotImplementedError
223 raise NotImplementedError
222
224
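A small sketch (not part of the patch) of the extra obligation 'full' places on a sink: anything it currently tracks that is not re-listed in 'files' has to be removed. The helper and names below are invented for illustration.

def files_to_remove(files, tracked, full):
    """Files a sink should delete when applying this commit."""
    if not full:
        return set()                      # incremental commit: nothing is implied
    listed = set(name for name, fileid in files)
    return set(tracked) - listed          # full manifest: prune everything else

# files_to_remove([('a.txt', 'r1')], tracked=['a.txt', 'old.txt'], full=True)
# -> set(['old.txt'])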
223 def puttags(self, tags):
225 def puttags(self, tags):
224 """Put tags into sink.
226 """Put tags into sink.
225
227
226 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
228 tags: {tagname: sink_rev_id, ...} where tagname is a UTF-8 string.
227 Return a pair (tag_revision, tag_parent_revision), or (None, None)
229 Return a pair (tag_revision, tag_parent_revision), or (None, None)
228 if nothing was changed.
230 if nothing was changed.
229 """
231 """
230 raise NotImplementedError
232 raise NotImplementedError
231
233
232 def setbranch(self, branch, pbranches):
234 def setbranch(self, branch, pbranches):
233 """Set the current branch name. Called before the first putcommit
235 """Set the current branch name. Called before the first putcommit
234 on the branch.
236 on the branch.
235 branch: branch name for subsequent commits
237 branch: branch name for subsequent commits
236 pbranches: (converted parent revision, parent branch) tuples"""
238 pbranches: (converted parent revision, parent branch) tuples"""
237 pass
239 pass
238
240
239 def setfilemapmode(self, active):
241 def setfilemapmode(self, active):
240 """Tell the destination that we're using a filemap
242 """Tell the destination that we're using a filemap
241
243
242 Some converter_sources (svn in particular) can claim that a file
244 Some converter_sources (svn in particular) can claim that a file
243 was changed in a revision, even if there was no change. This method
245 was changed in a revision, even if there was no change. This method
244 tells the destination that we're using a filemap and that it should
246 tells the destination that we're using a filemap and that it should
245 filter empty revisions.
247 filter empty revisions.
246 """
248 """
247 pass
249 pass
248
250
249 def before(self):
251 def before(self):
250 pass
252 pass
251
253
252 def after(self):
254 def after(self):
253 pass
255 pass
254
256
255 def putbookmarks(self, bookmarks):
257 def putbookmarks(self, bookmarks):
256 """Put bookmarks into sink.
258 """Put bookmarks into sink.
257
259
258 bookmarks: {bookmarkname: sink_rev_id, ...}
260 bookmarks: {bookmarkname: sink_rev_id, ...}
259 where bookmarkname is a UTF-8 string.
261 where bookmarkname is a UTF-8 string.
260 """
262 """
261 pass
263 pass
262
264
263 def hascommitfrommap(self, rev):
265 def hascommitfrommap(self, rev):
264 """Return False if a rev mentioned in a filemap is known to not be
266 """Return False if a rev mentioned in a filemap is known to not be
265 present."""
267 present."""
266 raise NotImplementedError
268 raise NotImplementedError
267
269
268 def hascommitforsplicemap(self, rev):
270 def hascommitforsplicemap(self, rev):
269 """This method is for the special needs for splicemap handling and not
271 """This method is for the special needs for splicemap handling and not
270 for general use. Returns True if the sink contains rev, aborts on some
272 for general use. Returns True if the sink contains rev, aborts on some
271 special cases."""
273 special cases."""
272 raise NotImplementedError
274 raise NotImplementedError
273
275
274 class commandline(object):
276 class commandline(object):
275 def __init__(self, ui, command):
277 def __init__(self, ui, command):
276 self.ui = ui
278 self.ui = ui
277 self.command = command
279 self.command = command
278
280
279 def prerun(self):
281 def prerun(self):
280 pass
282 pass
281
283
282 def postrun(self):
284 def postrun(self):
283 pass
285 pass
284
286
285 def _cmdline(self, cmd, *args, **kwargs):
287 def _cmdline(self, cmd, *args, **kwargs):
286 cmdline = [self.command, cmd] + list(args)
288 cmdline = [self.command, cmd] + list(args)
287 for k, v in kwargs.iteritems():
289 for k, v in kwargs.iteritems():
288 if len(k) == 1:
290 if len(k) == 1:
289 cmdline.append('-' + k)
291 cmdline.append('-' + k)
290 else:
292 else:
291 cmdline.append('--' + k.replace('_', '-'))
293 cmdline.append('--' + k.replace('_', '-'))
292 try:
294 try:
293 if len(k) == 1:
295 if len(k) == 1:
294 cmdline.append('' + v)
296 cmdline.append('' + v)
295 else:
297 else:
296 cmdline[-1] += '=' + v
298 cmdline[-1] += '=' + v
297 except TypeError:
299 except TypeError:
298 pass
300 pass
299 cmdline = [util.shellquote(arg) for arg in cmdline]
301 cmdline = [util.shellquote(arg) for arg in cmdline]
300 if not self.ui.debugflag:
302 if not self.ui.debugflag:
301 cmdline += ['2>', os.devnull]
303 cmdline += ['2>', os.devnull]
302 cmdline = ' '.join(cmdline)
304 cmdline = ' '.join(cmdline)
303 return cmdline
305 return cmdline
304
306
305 def _run(self, cmd, *args, **kwargs):
307 def _run(self, cmd, *args, **kwargs):
306 def popen(cmdline):
308 def popen(cmdline):
307 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
309 p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
308 close_fds=util.closefds,
310 close_fds=util.closefds,
309 stdout=subprocess.PIPE)
311 stdout=subprocess.PIPE)
310 return p
312 return p
311 return self._dorun(popen, cmd, *args, **kwargs)
313 return self._dorun(popen, cmd, *args, **kwargs)
312
314
313 def _run2(self, cmd, *args, **kwargs):
315 def _run2(self, cmd, *args, **kwargs):
314 return self._dorun(util.popen2, cmd, *args, **kwargs)
316 return self._dorun(util.popen2, cmd, *args, **kwargs)
315
317
316 def _dorun(self, openfunc, cmd, *args, **kwargs):
318 def _dorun(self, openfunc, cmd, *args, **kwargs):
317 cmdline = self._cmdline(cmd, *args, **kwargs)
319 cmdline = self._cmdline(cmd, *args, **kwargs)
318 self.ui.debug('running: %s\n' % (cmdline,))
320 self.ui.debug('running: %s\n' % (cmdline,))
319 self.prerun()
321 self.prerun()
320 try:
322 try:
321 return openfunc(cmdline)
323 return openfunc(cmdline)
322 finally:
324 finally:
323 self.postrun()
325 self.postrun()
324
326
325 def run(self, cmd, *args, **kwargs):
327 def run(self, cmd, *args, **kwargs):
326 p = self._run(cmd, *args, **kwargs)
328 p = self._run(cmd, *args, **kwargs)
327 output = p.communicate()[0]
329 output = p.communicate()[0]
328 self.ui.debug(output)
330 self.ui.debug(output)
329 return output, p.returncode
331 return output, p.returncode
330
332
331 def runlines(self, cmd, *args, **kwargs):
333 def runlines(self, cmd, *args, **kwargs):
332 p = self._run(cmd, *args, **kwargs)
334 p = self._run(cmd, *args, **kwargs)
333 output = p.stdout.readlines()
335 output = p.stdout.readlines()
334 p.wait()
336 p.wait()
335 self.ui.debug(''.join(output))
337 self.ui.debug(''.join(output))
336 return output, p.returncode
338 return output, p.returncode
337
339
338 def checkexit(self, status, output=''):
340 def checkexit(self, status, output=''):
339 if status:
341 if status:
340 if output:
342 if output:
341 self.ui.warn(_('%s error:\n') % self.command)
343 self.ui.warn(_('%s error:\n') % self.command)
342 self.ui.warn(output)
344 self.ui.warn(output)
343 msg = util.explainexit(status)[0]
345 msg = util.explainexit(status)[0]
344 raise util.Abort('%s %s' % (self.command, msg))
346 raise util.Abort('%s %s' % (self.command, msg))
345
347
346 def run0(self, cmd, *args, **kwargs):
348 def run0(self, cmd, *args, **kwargs):
347 output, status = self.run(cmd, *args, **kwargs)
349 output, status = self.run(cmd, *args, **kwargs)
348 self.checkexit(status, output)
350 self.checkexit(status, output)
349 return output
351 return output
350
352
351 def runlines0(self, cmd, *args, **kwargs):
353 def runlines0(self, cmd, *args, **kwargs):
352 output, status = self.runlines(cmd, *args, **kwargs)
354 output, status = self.runlines(cmd, *args, **kwargs)
353 self.checkexit(status, ''.join(output))
355 self.checkexit(status, ''.join(output))
354 return output
356 return output
355
357
356 @propertycache
358 @propertycache
357 def argmax(self):
359 def argmax(self):
358 # POSIX requires at least 4096 bytes for ARG_MAX
360 # POSIX requires at least 4096 bytes for ARG_MAX
359 argmax = 4096
361 argmax = 4096
360 try:
362 try:
361 argmax = os.sysconf("SC_ARG_MAX")
363 argmax = os.sysconf("SC_ARG_MAX")
362 except (AttributeError, ValueError):
364 except (AttributeError, ValueError):
363 pass
365 pass
364
366
365 # Windows shells impose their own limits on command line length,
367 # Windows shells impose their own limits on command line length,
366 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
368 # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
367 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
369 # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
368 # details about cmd.exe limitations.
370 # details about cmd.exe limitations.
369
371
370 # Since ARG_MAX is for command line _and_ environment, lower our limit
372 # Since ARG_MAX is for command line _and_ environment, lower our limit
371 # (and keep Windows shells happy while doing this).
373 # (and keep Windows shells happy while doing this).
372 return argmax // 2 - 1
374 return argmax // 2 - 1
373
375
374 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
376 def _limit_arglist(self, arglist, cmd, *args, **kwargs):
375 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
377 cmdlen = len(self._cmdline(cmd, *args, **kwargs))
376 limit = self.argmax - cmdlen
378 limit = self.argmax - cmdlen
377 bytes = 0
379 bytes = 0
378 fl = []
380 fl = []
379 for fn in arglist:
381 for fn in arglist:
380 b = len(fn) + 3
382 b = len(fn) + 3
381 if bytes + b < limit or len(fl) == 0:
383 if bytes + b < limit or len(fl) == 0:
382 fl.append(fn)
384 fl.append(fn)
383 bytes += b
385 bytes += b
384 else:
386 else:
385 yield fl
387 yield fl
386 fl = [fn]
388 fl = [fn]
387 bytes = b
389 bytes = b
388 if fl:
390 if fl:
389 yield fl
391 yield fl
390
392
391 def xargs(self, arglist, cmd, *args, **kwargs):
393 def xargs(self, arglist, cmd, *args, **kwargs):
392 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
394 for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
393 self.run0(cmd, *(list(args) + l), **kwargs)
395 self.run0(cmd, *(list(args) + l), **kwargs)
394
396
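For illustration, a standalone restatement of the batching that _limit_arglist() and xargs() perform: arguments are grouped so that each generated command line stays under the ARG_MAX-derived limit. The tiny limit in the usage comment is only there to make the batching visible.

def batch(arglist, limit):
    used, group = 0, []
    for fn in arglist:
        cost = len(fn) + 3                # same per-argument padding as _limit_arglist
        if used + cost < limit or not group:
            group.append(fn)
            used += cost
        else:
            yield group
            group, used = [fn], cost
    if group:
        yield group

# list(batch(['a.txt', 'b.txt', 'c.txt'], 20)) -> [['a.txt', 'b.txt'], ['c.txt']]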
395 class mapfile(dict):
397 class mapfile(dict):
396 def __init__(self, ui, path):
398 def __init__(self, ui, path):
397 super(mapfile, self).__init__()
399 super(mapfile, self).__init__()
398 self.ui = ui
400 self.ui = ui
399 self.path = path
401 self.path = path
400 self.fp = None
402 self.fp = None
401 self.order = []
403 self.order = []
402 self._read()
404 self._read()
403
405
404 def _read(self):
406 def _read(self):
405 if not self.path:
407 if not self.path:
406 return
408 return
407 try:
409 try:
408 fp = open(self.path, 'r')
410 fp = open(self.path, 'r')
409 except IOError, err:
411 except IOError, err:
410 if err.errno != errno.ENOENT:
412 if err.errno != errno.ENOENT:
411 raise
413 raise
412 return
414 return
413 for i, line in enumerate(fp):
415 for i, line in enumerate(fp):
414 line = line.splitlines()[0].rstrip()
416 line = line.splitlines()[0].rstrip()
415 if not line:
417 if not line:
416 # Ignore blank lines
418 # Ignore blank lines
417 continue
419 continue
418 try:
420 try:
419 key, value = line.rsplit(' ', 1)
421 key, value = line.rsplit(' ', 1)
420 except ValueError:
422 except ValueError:
421 raise util.Abort(
423 raise util.Abort(
422 _('syntax error in %s(%d): key/value pair expected')
424 _('syntax error in %s(%d): key/value pair expected')
423 % (self.path, i + 1))
425 % (self.path, i + 1))
424 if key not in self:
426 if key not in self:
425 self.order.append(key)
427 self.order.append(key)
426 super(mapfile, self).__setitem__(key, value)
428 super(mapfile, self).__setitem__(key, value)
427 fp.close()
429 fp.close()
428
430
429 def __setitem__(self, key, value):
431 def __setitem__(self, key, value):
430 if self.fp is None:
432 if self.fp is None:
431 try:
433 try:
432 self.fp = open(self.path, 'a')
434 self.fp = open(self.path, 'a')
433 except IOError, err:
435 except IOError, err:
434 raise util.Abort(_('could not open map file %r: %s') %
436 raise util.Abort(_('could not open map file %r: %s') %
435 (self.path, err.strerror))
437 (self.path, err.strerror))
436 self.fp.write('%s %s\n' % (key, value))
438 self.fp.write('%s %s\n' % (key, value))
437 self.fp.flush()
439 self.fp.flush()
438 super(mapfile, self).__setitem__(key, value)
440 super(mapfile, self).__setitem__(key, value)
439
441
440 def close(self):
442 def close(self):
441 if self.fp:
443 if self.fp:
442 self.fp.close()
444 self.fp.close()
443 self.fp = None
445 self.fp = None
444
446
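A usage sketch, assuming this module is importable as hgext.convert.common: mapfile behaves like a dict that appends a 'key value' line to its backing file on every assignment, which is how the revision map survives an interrupted conversion. The path is a placeholder.

from mercurial import ui as uimod
from hgext.convert.common import mapfile

m = mapfile(uimod.ui(), '/tmp/example-shamap')   # placeholder path
m['source-rev-1'] = 'sink-rev-1'                 # written and flushed immediately
m.close()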
445 def makedatetimestamp(t):
447 def makedatetimestamp(t):
446 """Like util.makedate() but for time t instead of current time"""
448 """Like util.makedate() but for time t instead of current time"""
447 delta = (datetime.datetime.utcfromtimestamp(t) -
449 delta = (datetime.datetime.utcfromtimestamp(t) -
448 datetime.datetime.fromtimestamp(t))
450 datetime.datetime.fromtimestamp(t))
449 tz = delta.days * 86400 + delta.seconds
451 tz = delta.days * 86400 + delta.seconds
450 return t, tz
452 return t, tz
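A usage sketch for makedatetimestamp(), assuming the same import path as above; it produces the (unixtime, offset) pair Mercurial uses for dates, with the offset taken from the local timezone of the converting host. The offset in the comment is only an example.

import time
from hgext.convert.common import makedatetimestamp

when = time.time()
stamp, offset = makedatetimestamp(when)
# on a UTC+2 host: stamp == when and offset == -7200 (seconds west of UTC)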
@@ -1,532 +1,532 b''
1 # convcmd - convert extension commands definition
1 # convcmd - convert extension commands definition
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
8 from common import NoRepo, MissingTool, SKIPREV, mapfile
9 from cvs import convert_cvs
9 from cvs import convert_cvs
10 from darcs import darcs_source
10 from darcs import darcs_source
11 from git import convert_git
11 from git import convert_git
12 from hg import mercurial_source, mercurial_sink
12 from hg import mercurial_source, mercurial_sink
13 from subversion import svn_source, svn_sink
13 from subversion import svn_source, svn_sink
14 from monotone import monotone_source
14 from monotone import monotone_source
15 from gnuarch import gnuarch_source
15 from gnuarch import gnuarch_source
16 from bzr import bzr_source
16 from bzr import bzr_source
17 from p4 import p4_source
17 from p4 import p4_source
18 import filemap
18 import filemap
19
19
20 import os, shutil, shlex
20 import os, shutil, shlex
21 from mercurial import hg, util, encoding
21 from mercurial import hg, util, encoding
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23
23
24 orig_encoding = 'ascii'
24 orig_encoding = 'ascii'
25
25
26 def recode(s):
26 def recode(s):
27 if isinstance(s, unicode):
27 if isinstance(s, unicode):
28 return s.encode(orig_encoding, 'replace')
28 return s.encode(orig_encoding, 'replace')
29 else:
29 else:
30 return s.decode('utf-8').encode(orig_encoding, 'replace')
30 return s.decode('utf-8').encode(orig_encoding, 'replace')
31
31
32 source_converters = [
32 source_converters = [
33 ('cvs', convert_cvs, 'branchsort'),
33 ('cvs', convert_cvs, 'branchsort'),
34 ('git', convert_git, 'branchsort'),
34 ('git', convert_git, 'branchsort'),
35 ('svn', svn_source, 'branchsort'),
35 ('svn', svn_source, 'branchsort'),
36 ('hg', mercurial_source, 'sourcesort'),
36 ('hg', mercurial_source, 'sourcesort'),
37 ('darcs', darcs_source, 'branchsort'),
37 ('darcs', darcs_source, 'branchsort'),
38 ('mtn', monotone_source, 'branchsort'),
38 ('mtn', monotone_source, 'branchsort'),
39 ('gnuarch', gnuarch_source, 'branchsort'),
39 ('gnuarch', gnuarch_source, 'branchsort'),
40 ('bzr', bzr_source, 'branchsort'),
40 ('bzr', bzr_source, 'branchsort'),
41 ('p4', p4_source, 'branchsort'),
41 ('p4', p4_source, 'branchsort'),
42 ]
42 ]
43
43
44 sink_converters = [
44 sink_converters = [
45 ('hg', mercurial_sink),
45 ('hg', mercurial_sink),
46 ('svn', svn_sink),
46 ('svn', svn_sink),
47 ]
47 ]
48
48
49 def convertsource(ui, path, type, rev):
49 def convertsource(ui, path, type, rev):
50 exceptions = []
50 exceptions = []
51 if type and type not in [s[0] for s in source_converters]:
51 if type and type not in [s[0] for s in source_converters]:
52 raise util.Abort(_('%s: invalid source repository type') % type)
52 raise util.Abort(_('%s: invalid source repository type') % type)
53 for name, source, sortmode in source_converters:
53 for name, source, sortmode in source_converters:
54 try:
54 try:
55 if not type or name == type:
55 if not type or name == type:
56 return source(ui, path, rev), sortmode
56 return source(ui, path, rev), sortmode
57 except (NoRepo, MissingTool), inst:
57 except (NoRepo, MissingTool), inst:
58 exceptions.append(inst)
58 exceptions.append(inst)
59 if not ui.quiet:
59 if not ui.quiet:
60 for inst in exceptions:
60 for inst in exceptions:
61 ui.write("%s\n" % inst)
61 ui.write("%s\n" % inst)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
62 raise util.Abort(_('%s: missing or unsupported repository') % path)
63
63
64 def convertsink(ui, path, type):
64 def convertsink(ui, path, type):
65 if type and type not in [s[0] for s in sink_converters]:
65 if type and type not in [s[0] for s in sink_converters]:
66 raise util.Abort(_('%s: invalid destination repository type') % type)
66 raise util.Abort(_('%s: invalid destination repository type') % type)
67 for name, sink in sink_converters:
67 for name, sink in sink_converters:
68 try:
68 try:
69 if not type or name == type:
69 if not type or name == type:
70 return sink(ui, path)
70 return sink(ui, path)
71 except NoRepo, inst:
71 except NoRepo, inst:
72 ui.note(_("convert: %s\n") % inst)
72 ui.note(_("convert: %s\n") % inst)
73 except MissingTool, inst:
73 except MissingTool, inst:
74 raise util.Abort('%s\n' % inst)
74 raise util.Abort('%s\n' % inst)
75 raise util.Abort(_('%s: unknown repository type') % path)
75 raise util.Abort(_('%s: unknown repository type') % path)
76
76
77 class progresssource(object):
77 class progresssource(object):
78 def __init__(self, ui, source, filecount):
78 def __init__(self, ui, source, filecount):
79 self.ui = ui
79 self.ui = ui
80 self.source = source
80 self.source = source
81 self.filecount = filecount
81 self.filecount = filecount
82 self.retrieved = 0
82 self.retrieved = 0
83
83
84 def getfile(self, file, rev):
84 def getfile(self, file, rev):
85 self.retrieved += 1
85 self.retrieved += 1
86 self.ui.progress(_('getting files'), self.retrieved,
86 self.ui.progress(_('getting files'), self.retrieved,
87 item=file, total=self.filecount)
87 item=file, total=self.filecount)
88 return self.source.getfile(file, rev)
88 return self.source.getfile(file, rev)
89
89
90 def lookuprev(self, rev):
90 def lookuprev(self, rev):
91 return self.source.lookuprev(rev)
91 return self.source.lookuprev(rev)
92
92
93 def close(self):
93 def close(self):
94 self.ui.progress(_('getting files'), None)
94 self.ui.progress(_('getting files'), None)
95
95
96 class converter(object):
96 class converter(object):
97 def __init__(self, ui, source, dest, revmapfile, opts):
97 def __init__(self, ui, source, dest, revmapfile, opts):
98
98
99 self.source = source
99 self.source = source
100 self.dest = dest
100 self.dest = dest
101 self.ui = ui
101 self.ui = ui
102 self.opts = opts
102 self.opts = opts
103 self.commitcache = {}
103 self.commitcache = {}
104 self.authors = {}
104 self.authors = {}
105 self.authorfile = None
105 self.authorfile = None
106
106
107 # Record converted revisions persistently: maps source revision
107 # Record converted revisions persistently: maps source revision
108 # ID to target revision ID (both strings). (This is how
108 # ID to target revision ID (both strings). (This is how
109 # incremental conversions work.)
109 # incremental conversions work.)
110 self.map = mapfile(ui, revmapfile)
110 self.map = mapfile(ui, revmapfile)
111
111
112 # Read first the dst author map if any
112 # Read first the dst author map if any
113 authorfile = self.dest.authorfile()
113 authorfile = self.dest.authorfile()
114 if authorfile and os.path.exists(authorfile):
114 if authorfile and os.path.exists(authorfile):
115 self.readauthormap(authorfile)
115 self.readauthormap(authorfile)
116 # Extend/Override with new author map if necessary
116 # Extend/Override with new author map if necessary
117 if opts.get('authormap'):
117 if opts.get('authormap'):
118 self.readauthormap(opts.get('authormap'))
118 self.readauthormap(opts.get('authormap'))
119 self.authorfile = self.dest.authorfile()
119 self.authorfile = self.dest.authorfile()
120
120
121 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
121 self.splicemap = self.parsesplicemap(opts.get('splicemap'))
122 self.branchmap = mapfile(ui, opts.get('branchmap'))
122 self.branchmap = mapfile(ui, opts.get('branchmap'))
123
123
124 def parsesplicemap(self, path):
124 def parsesplicemap(self, path):
125 """ check and validate the splicemap format and
125 """ check and validate the splicemap format and
126 return a child/parents dictionary.
126 return a child/parents dictionary.
127 Format checking has two parts.
127 Format checking has two parts.
128 1. generic format, which is the same across all source types
128 1. generic format, which is the same across all source types
129 2. specific format checking, which may differ between
129 2. specific format checking, which may differ between
130 source types. This logic is implemented in the
130 source types. This logic is implemented in the
131 checkrevformat function in source files like
131 checkrevformat function in source files like
132 hg.py, subversion.py, etc.
132 hg.py, subversion.py, etc.
133 """
133 """
134
134
135 if not path:
135 if not path:
136 return {}
136 return {}
137 m = {}
137 m = {}
138 try:
138 try:
139 fp = open(path, 'r')
139 fp = open(path, 'r')
140 for i, line in enumerate(fp):
140 for i, line in enumerate(fp):
141 line = line.splitlines()[0].rstrip()
141 line = line.splitlines()[0].rstrip()
142 if not line:
142 if not line:
143 # Ignore blank lines
143 # Ignore blank lines
144 continue
144 continue
145 # split line
145 # split line
146 lex = shlex.shlex(line, posix=True)
146 lex = shlex.shlex(line, posix=True)
147 lex.whitespace_split = True
147 lex.whitespace_split = True
148 lex.whitespace += ','
148 lex.whitespace += ','
149 line = list(lex)
149 line = list(lex)
150 # check number of parents
150 # check number of parents
151 if not (2 <= len(line) <= 3):
151 if not (2 <= len(line) <= 3):
152 raise util.Abort(_('syntax error in %s(%d): child parent1'
152 raise util.Abort(_('syntax error in %s(%d): child parent1'
153 '[,parent2] expected') % (path, i + 1))
153 '[,parent2] expected') % (path, i + 1))
154 for part in line:
154 for part in line:
155 self.source.checkrevformat(part)
155 self.source.checkrevformat(part)
156 child, p1, p2 = line[0], line[1:2], line[2:]
156 child, p1, p2 = line[0], line[1:2], line[2:]
157 if p1 == p2:
157 if p1 == p2:
158 m[child] = p1
158 m[child] = p1
159 else:
159 else:
160 m[child] = p1 + p2
160 m[child] = p1 + p2
161 # if the file does not exist or cannot be read, abort
161 # if the file does not exist or cannot be read, abort
162 except IOError:
162 except IOError:
163 raise util.Abort(_('splicemap file not found or error reading %s:')
163 raise util.Abort(_('splicemap file not found or error reading %s:')
164 % path)
164 % path)
165 return m
165 return m
166
166
167
167
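For reference, a small demonstration of the line format parsesplicemap() accepts: a child revision followed by one or two parents, separated by whitespace and/or commas. The 40-character ids are made up.

import shlex

line = 'c' * 40 + ' ' + 'a' * 40 + ',' + 'b' * 40
lex = shlex.shlex(line, posix=True)
lex.whitespace_split = True
lex.whitespace += ','
child, p1, p2 = list(lex)    # child plus the two parents spliced in for it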
168 def walktree(self, heads):
168 def walktree(self, heads):
169 '''Return a mapping that identifies the uncommitted parents of every
169 '''Return a mapping that identifies the uncommitted parents of every
170 uncommitted changeset.'''
170 uncommitted changeset.'''
171 visit = heads
171 visit = heads
172 known = set()
172 known = set()
173 parents = {}
173 parents = {}
174 while visit:
174 while visit:
175 n = visit.pop(0)
175 n = visit.pop(0)
176 if n in known:
176 if n in known:
177 continue
177 continue
178 if n in self.map:
178 if n in self.map:
179 m = self.map[n]
179 m = self.map[n]
180 if m == SKIPREV or self.dest.hascommitfrommap(m):
180 if m == SKIPREV or self.dest.hascommitfrommap(m):
181 continue
181 continue
182 known.add(n)
182 known.add(n)
183 self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
183 self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
184 commit = self.cachecommit(n)
184 commit = self.cachecommit(n)
185 parents[n] = []
185 parents[n] = []
186 for p in commit.parents:
186 for p in commit.parents:
187 parents[n].append(p)
187 parents[n].append(p)
188 visit.append(p)
188 visit.append(p)
189 self.ui.progress(_('scanning'), None)
189 self.ui.progress(_('scanning'), None)
190
190
191 return parents
191 return parents
192
192
193 def mergesplicemap(self, parents, splicemap):
193 def mergesplicemap(self, parents, splicemap):
194 """A splicemap redefines child/parent relationships. Check the
194 """A splicemap redefines child/parent relationships. Check the
195 map contains valid revision identifiers and merge the new
195 map contains valid revision identifiers and merge the new
196 links in the source graph.
196 links in the source graph.
197 """
197 """
198 for c in sorted(splicemap):
198 for c in sorted(splicemap):
199 if c not in parents:
199 if c not in parents:
200 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
200 if not self.dest.hascommitforsplicemap(self.map.get(c, c)):
201 # Could be in source but not converted during this run
201 # Could be in source but not converted during this run
202 self.ui.warn(_('splice map revision %s is not being '
202 self.ui.warn(_('splice map revision %s is not being '
203 'converted, ignoring\n') % c)
203 'converted, ignoring\n') % c)
204 continue
204 continue
205 pc = []
205 pc = []
206 for p in splicemap[c]:
206 for p in splicemap[c]:
207 # We do not have to wait for nodes already in dest.
207 # We do not have to wait for nodes already in dest.
208 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
208 if self.dest.hascommitforsplicemap(self.map.get(p, p)):
209 continue
209 continue
210 # Parent is not in dest and not being converted, not good
210 # Parent is not in dest and not being converted, not good
211 if p not in parents:
211 if p not in parents:
212 raise util.Abort(_('unknown splice map parent: %s') % p)
212 raise util.Abort(_('unknown splice map parent: %s') % p)
213 pc.append(p)
213 pc.append(p)
214 parents[c] = pc
214 parents[c] = pc
215
215
216 def toposort(self, parents, sortmode):
216 def toposort(self, parents, sortmode):
217 '''Return an ordering such that every uncommitted changeset is
217 '''Return an ordering such that every uncommitted changeset is
218 preceded by all its uncommitted ancestors.'''
218 preceded by all its uncommitted ancestors.'''
219
219
220 def mapchildren(parents):
220 def mapchildren(parents):
221 """Return a (children, roots) tuple where 'children' maps parent
221 """Return a (children, roots) tuple where 'children' maps parent
222 revision identifiers to children ones, and 'roots' is the list of
222 revision identifiers to children ones, and 'roots' is the list of
223 revisions without parents. 'parents' must be a mapping of revision
223 revisions without parents. 'parents' must be a mapping of revision
224 identifier to its parents ones.
224 identifier to its parents ones.
225 """
225 """
226 visit = sorted(parents)
226 visit = sorted(parents)
227 seen = set()
227 seen = set()
228 children = {}
228 children = {}
229 roots = []
229 roots = []
230
230
231 while visit:
231 while visit:
232 n = visit.pop(0)
232 n = visit.pop(0)
233 if n in seen:
233 if n in seen:
234 continue
234 continue
235 seen.add(n)
235 seen.add(n)
236 # Ensure that nodes without parents are present in the
236 # Ensure that nodes without parents are present in the
237 # 'children' mapping.
237 # 'children' mapping.
238 children.setdefault(n, [])
238 children.setdefault(n, [])
239 hasparent = False
239 hasparent = False
240 for p in parents[n]:
240 for p in parents[n]:
241 if p not in self.map:
241 if p not in self.map:
242 visit.append(p)
242 visit.append(p)
243 hasparent = True
243 hasparent = True
244 children.setdefault(p, []).append(n)
244 children.setdefault(p, []).append(n)
245 if not hasparent:
245 if not hasparent:
246 roots.append(n)
246 roots.append(n)
247
247
248 return children, roots
248 return children, roots
249
249
250 # Sort functions are supposed to take a list of revisions which
250 # Sort functions are supposed to take a list of revisions which
251 # can be converted immediately and pick one
251 # can be converted immediately and pick one
252
252
253 def makebranchsorter():
253 def makebranchsorter():
254 """If the previously converted revision has a child in the
254 """If the previously converted revision has a child in the
255 eligible revisions list, pick it. Return the list head
255 eligible revisions list, pick it. Return the list head
256 otherwise. Branch sort attempts to minimize branch
256 otherwise. Branch sort attempts to minimize branch
257 switching, which is harmful for Mercurial backend
257 switching, which is harmful for Mercurial backend
258 compression.
258 compression.
259 """
259 """
260 prev = [None]
260 prev = [None]
261 def picknext(nodes):
261 def picknext(nodes):
262 next = nodes[0]
262 next = nodes[0]
263 for n in nodes:
263 for n in nodes:
264 if prev[0] in parents[n]:
264 if prev[0] in parents[n]:
265 next = n
265 next = n
266 break
266 break
267 prev[0] = next
267 prev[0] = next
268 return next
268 return next
269 return picknext
269 return picknext
270
270
271 def makesourcesorter():
271 def makesourcesorter():
272 """Source specific sort."""
272 """Source specific sort."""
273 keyfn = lambda n: self.commitcache[n].sortkey
273 keyfn = lambda n: self.commitcache[n].sortkey
274 def picknext(nodes):
274 def picknext(nodes):
275 return sorted(nodes, key=keyfn)[0]
275 return sorted(nodes, key=keyfn)[0]
276 return picknext
276 return picknext
277
277
278 def makeclosesorter():
278 def makeclosesorter():
279 """Close order sort."""
279 """Close order sort."""
280 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
280 keyfn = lambda n: ('close' not in self.commitcache[n].extra,
281 self.commitcache[n].sortkey)
281 self.commitcache[n].sortkey)
282 def picknext(nodes):
282 def picknext(nodes):
283 return sorted(nodes, key=keyfn)[0]
283 return sorted(nodes, key=keyfn)[0]
284 return picknext
284 return picknext
285
285
286 def makedatesorter():
286 def makedatesorter():
287 """Sort revisions by date."""
287 """Sort revisions by date."""
288 dates = {}
288 dates = {}
289 def getdate(n):
289 def getdate(n):
290 if n not in dates:
290 if n not in dates:
291 dates[n] = util.parsedate(self.commitcache[n].date)
291 dates[n] = util.parsedate(self.commitcache[n].date)
292 return dates[n]
292 return dates[n]
293
293
294 def picknext(nodes):
294 def picknext(nodes):
295 return min([(getdate(n), n) for n in nodes])[1]
295 return min([(getdate(n), n) for n in nodes])[1]
296
296
297 return picknext
297 return picknext
298
298
299 if sortmode == 'branchsort':
299 if sortmode == 'branchsort':
300 picknext = makebranchsorter()
300 picknext = makebranchsorter()
301 elif sortmode == 'datesort':
301 elif sortmode == 'datesort':
302 picknext = makedatesorter()
302 picknext = makedatesorter()
303 elif sortmode == 'sourcesort':
303 elif sortmode == 'sourcesort':
304 picknext = makesourcesorter()
304 picknext = makesourcesorter()
305 elif sortmode == 'closesort':
305 elif sortmode == 'closesort':
306 picknext = makeclosesorter()
306 picknext = makeclosesorter()
307 else:
307 else:
308 raise util.Abort(_('unknown sort mode: %s') % sortmode)
308 raise util.Abort(_('unknown sort mode: %s') % sortmode)
309
309
310 children, actives = mapchildren(parents)
310 children, actives = mapchildren(parents)
311
311
312 s = []
312 s = []
313 pendings = {}
313 pendings = {}
314 while actives:
314 while actives:
315 n = picknext(actives)
315 n = picknext(actives)
316 actives.remove(n)
316 actives.remove(n)
317 s.append(n)
317 s.append(n)
318
318
319 # Update dependents list
319 # Update dependents list
320 for c in children.get(n, []):
320 for c in children.get(n, []):
321 if c not in pendings:
321 if c not in pendings:
322 pendings[c] = [p for p in parents[c] if p not in self.map]
322 pendings[c] = [p for p in parents[c] if p not in self.map]
323 try:
323 try:
324 pendings[c].remove(n)
324 pendings[c].remove(n)
325 except ValueError:
325 except ValueError:
326 raise util.Abort(_('cycle detected between %s and %s')
326 raise util.Abort(_('cycle detected between %s and %s')
327 % (recode(c), recode(n)))
327 % (recode(c), recode(n)))
328 if not pendings[c]:
328 if not pendings[c]:
329 # Parents are converted, node is eligible
329 # Parents are converted, node is eligible
330 actives.insert(0, c)
330 actives.insert(0, c)
331 pendings[c] = None
331 pendings[c] = None
332
332
333 if len(s) != len(parents):
333 if len(s) != len(parents):
334 raise util.Abort(_("not all revisions were sorted"))
334 raise util.Abort(_("not all revisions were sorted"))
335
335
336 return s
336 return s
337
337
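A worked example (with invented data) of the tie-breaking key used by makeclosesorter() above: sorting on the tuple (not-closed, sortkey) lets branch-closing revisions be picked first among the currently eligible nodes.

commits = [
    {'id': 'a', 'extra': {},           'sortkey': 2},
    {'id': 'b', 'extra': {'close': 1}, 'sortkey': 3},
    {'id': 'c', 'extra': {},           'sortkey': 1},
]
keyfn = lambda c: ('close' not in c['extra'], c['sortkey'])
picked = sorted(commits, key=keyfn)[0]    # -> 'b', the closing revision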
338 def writeauthormap(self):
338 def writeauthormap(self):
339 authorfile = self.authorfile
339 authorfile = self.authorfile
340 if authorfile:
340 if authorfile:
341 self.ui.status(_('writing author map file %s\n') % authorfile)
341 self.ui.status(_('writing author map file %s\n') % authorfile)
342 ofile = open(authorfile, 'w+')
342 ofile = open(authorfile, 'w+')
343 for author in self.authors:
343 for author in self.authors:
344 ofile.write("%s=%s\n" % (author, self.authors[author]))
344 ofile.write("%s=%s\n" % (author, self.authors[author]))
345 ofile.close()
345 ofile.close()
346
346
347 def readauthormap(self, authorfile):
347 def readauthormap(self, authorfile):
348 afile = open(authorfile, 'r')
348 afile = open(authorfile, 'r')
349 for line in afile:
349 for line in afile:
350
350
351 line = line.strip()
351 line = line.strip()
352 if not line or line.startswith('#'):
352 if not line or line.startswith('#'):
353 continue
353 continue
354
354
355 try:
355 try:
356 srcauthor, dstauthor = line.split('=', 1)
356 srcauthor, dstauthor = line.split('=', 1)
357 except ValueError:
357 except ValueError:
358 msg = _('ignoring bad line in author map file %s: %s\n')
358 msg = _('ignoring bad line in author map file %s: %s\n')
359 self.ui.warn(msg % (authorfile, line.rstrip()))
359 self.ui.warn(msg % (authorfile, line.rstrip()))
360 continue
360 continue
361
361
362 srcauthor = srcauthor.strip()
362 srcauthor = srcauthor.strip()
363 dstauthor = dstauthor.strip()
363 dstauthor = dstauthor.strip()
364 if self.authors.get(srcauthor) in (None, dstauthor):
364 if self.authors.get(srcauthor) in (None, dstauthor):
365 msg = _('mapping author %s to %s\n')
365 msg = _('mapping author %s to %s\n')
366 self.ui.debug(msg % (srcauthor, dstauthor))
366 self.ui.debug(msg % (srcauthor, dstauthor))
367 self.authors[srcauthor] = dstauthor
367 self.authors[srcauthor] = dstauthor
368 continue
368 continue
369
369
370 m = _('overriding mapping for author %s, was %s, will be %s\n')
370 m = _('overriding mapping for author %s, was %s, will be %s\n')
371 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
371 self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
372
372
373 afile.close()
373 afile.close()
374
374
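For reference, an invented example of the file format readauthormap() accepts: one 'source=destination' pair per line, with '#' comments and blank lines ignored and whitespace around '=' stripped.

authormap_text = """
# map VCS logins to full identities
jrandom = J. Random Hacker <jrandom@example.org>
build=Build Daemon <build@example.org>
"""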
375 def cachecommit(self, rev):
375 def cachecommit(self, rev):
376 commit = self.source.getcommit(rev)
376 commit = self.source.getcommit(rev)
377 commit.author = self.authors.get(commit.author, commit.author)
377 commit.author = self.authors.get(commit.author, commit.author)
378 # If commit.branch is None, this commit is coming from the source
378 # If commit.branch is None, this commit is coming from the source
379 # repository's default branch and destined for the default branch in the
379 # repository's default branch and destined for the default branch in the
380 # destination repository. For such commits, passing a literal "None"
380 # destination repository. For such commits, passing a literal "None"
381 # string to branchmap.get() below allows the user to map "None" to an
381 # string to branchmap.get() below allows the user to map "None" to an
382 # alternate default branch in the destination repository.
382 # alternate default branch in the destination repository.
383 commit.branch = self.branchmap.get(str(commit.branch), commit.branch)
383 commit.branch = self.branchmap.get(str(commit.branch), commit.branch)
384 self.commitcache[rev] = commit
384 self.commitcache[rev] = commit
385 return commit
385 return commit
386
386
387 def copy(self, rev):
387 def copy(self, rev):
388 commit = self.commitcache[rev]
388 commit = self.commitcache[rev]
389
389 full = self.opts.get('full')
390 changes = self.source.getchanges(rev)
390 changes = self.source.getchanges(rev, full)
391 if isinstance(changes, basestring):
391 if isinstance(changes, basestring):
392 if changes == SKIPREV:
392 if changes == SKIPREV:
393 dest = SKIPREV
393 dest = SKIPREV
394 else:
394 else:
395 dest = self.map[changes]
395 dest = self.map[changes]
396 self.map[rev] = dest
396 self.map[rev] = dest
397 return
397 return
398 files, copies = changes
398 files, copies = changes
399 pbranches = []
399 pbranches = []
400 if commit.parents:
400 if commit.parents:
401 for prev in commit.parents:
401 for prev in commit.parents:
402 if prev not in self.commitcache:
402 if prev not in self.commitcache:
403 self.cachecommit(prev)
403 self.cachecommit(prev)
404 pbranches.append((self.map[prev],
404 pbranches.append((self.map[prev],
405 self.commitcache[prev].branch))
405 self.commitcache[prev].branch))
406 self.dest.setbranch(commit.branch, pbranches)
406 self.dest.setbranch(commit.branch, pbranches)
407 try:
407 try:
408 parents = self.splicemap[rev]
408 parents = self.splicemap[rev]
409 self.ui.status(_('spliced in %s as parents of %s\n') %
409 self.ui.status(_('spliced in %s as parents of %s\n') %
410 (parents, rev))
410 (parents, rev))
411 parents = [self.map.get(p, p) for p in parents]
411 parents = [self.map.get(p, p) for p in parents]
412 except KeyError:
412 except KeyError:
413 parents = [b[0] for b in pbranches]
413 parents = [b[0] for b in pbranches]
414 source = progresssource(self.ui, self.source, len(files))
414 source = progresssource(self.ui, self.source, len(files))
415 newnode = self.dest.putcommit(files, copies, parents, commit,
415 newnode = self.dest.putcommit(files, copies, parents, commit,
416 source, self.map)
416 source, self.map, full)
417 source.close()
417 source.close()
418 self.source.converted(rev, newnode)
418 self.source.converted(rev, newnode)
419 self.map[rev] = newnode
419 self.map[rev] = newnode
420
420
421 def convert(self, sortmode):
421 def convert(self, sortmode):
422 try:
422 try:
423 self.source.before()
423 self.source.before()
424 self.dest.before()
424 self.dest.before()
425 self.source.setrevmap(self.map)
425 self.source.setrevmap(self.map)
426 self.ui.status(_("scanning source...\n"))
426 self.ui.status(_("scanning source...\n"))
427 heads = self.source.getheads()
427 heads = self.source.getheads()
428 parents = self.walktree(heads)
428 parents = self.walktree(heads)
429 self.mergesplicemap(parents, self.splicemap)
429 self.mergesplicemap(parents, self.splicemap)
430 self.ui.status(_("sorting...\n"))
430 self.ui.status(_("sorting...\n"))
431 t = self.toposort(parents, sortmode)
431 t = self.toposort(parents, sortmode)
432 num = len(t)
432 num = len(t)
433 c = None
433 c = None
434
434
435 self.ui.status(_("converting...\n"))
435 self.ui.status(_("converting...\n"))
436 for i, c in enumerate(t):
436 for i, c in enumerate(t):
437 num -= 1
437 num -= 1
438 desc = self.commitcache[c].desc
438 desc = self.commitcache[c].desc
439 if "\n" in desc:
439 if "\n" in desc:
440 desc = desc.splitlines()[0]
440 desc = desc.splitlines()[0]
441 # convert log message to local encoding without using
441 # convert log message to local encoding without using
442 # tolocal() because the encoding.encoding convert()
442 # tolocal() because the encoding.encoding convert()
443 # uses is 'utf-8'
443 # uses is 'utf-8'
444 self.ui.status("%d %s\n" % (num, recode(desc)))
444 self.ui.status("%d %s\n" % (num, recode(desc)))
445 self.ui.note(_("source: %s\n") % recode(c))
445 self.ui.note(_("source: %s\n") % recode(c))
446 self.ui.progress(_('converting'), i, unit=_('revisions'),
446 self.ui.progress(_('converting'), i, unit=_('revisions'),
447 total=len(t))
447 total=len(t))
448 self.copy(c)
448 self.copy(c)
449 self.ui.progress(_('converting'), None)
449 self.ui.progress(_('converting'), None)
450
450
451 tags = self.source.gettags()
451 tags = self.source.gettags()
452 ctags = {}
452 ctags = {}
453 for k in tags:
453 for k in tags:
454 v = tags[k]
454 v = tags[k]
455 if self.map.get(v, SKIPREV) != SKIPREV:
455 if self.map.get(v, SKIPREV) != SKIPREV:
456 ctags[k] = self.map[v]
456 ctags[k] = self.map[v]
457
457
458 if c and ctags:
458 if c and ctags:
459 nrev, tagsparent = self.dest.puttags(ctags)
459 nrev, tagsparent = self.dest.puttags(ctags)
460 if nrev and tagsparent:
460 if nrev and tagsparent:
461 # write another hash correspondence to override the previous
461 # write another hash correspondence to override the previous
462 # one so we don't end up with extra tag heads
462 # one so we don't end up with extra tag heads
463 tagsparents = [e for e in self.map.iteritems()
463 tagsparents = [e for e in self.map.iteritems()
464 if e[1] == tagsparent]
464 if e[1] == tagsparent]
465 if tagsparents:
465 if tagsparents:
466 self.map[tagsparents[0][0]] = nrev
466 self.map[tagsparents[0][0]] = nrev
467
467
468 bookmarks = self.source.getbookmarks()
468 bookmarks = self.source.getbookmarks()
469 cbookmarks = {}
469 cbookmarks = {}
470 for k in bookmarks:
470 for k in bookmarks:
471 v = bookmarks[k]
471 v = bookmarks[k]
472 if self.map.get(v, SKIPREV) != SKIPREV:
472 if self.map.get(v, SKIPREV) != SKIPREV:
473 cbookmarks[k] = self.map[v]
473 cbookmarks[k] = self.map[v]
474
474
475 if c and cbookmarks:
475 if c and cbookmarks:
476 self.dest.putbookmarks(cbookmarks)
476 self.dest.putbookmarks(cbookmarks)
477
477
478 self.writeauthormap()
478 self.writeauthormap()
479 finally:
479 finally:
480 self.cleanup()
480 self.cleanup()
481
481
482 def cleanup(self):
482 def cleanup(self):
483 try:
483 try:
484 self.dest.after()
484 self.dest.after()
485 finally:
485 finally:
486 self.source.after()
486 self.source.after()
487 self.map.close()
487 self.map.close()
488
488
489 def convert(ui, src, dest=None, revmapfile=None, **opts):
489 def convert(ui, src, dest=None, revmapfile=None, **opts):
490 global orig_encoding
490 global orig_encoding
491 orig_encoding = encoding.encoding
491 orig_encoding = encoding.encoding
492 encoding.encoding = 'UTF-8'
492 encoding.encoding = 'UTF-8'
493
493
494 # support --authors as an alias for --authormap
494 # support --authors as an alias for --authormap
495 if not opts.get('authormap'):
495 if not opts.get('authormap'):
496 opts['authormap'] = opts.get('authors')
496 opts['authormap'] = opts.get('authors')
497
497
498 if not dest:
498 if not dest:
499 dest = hg.defaultdest(src) + "-hg"
499 dest = hg.defaultdest(src) + "-hg"
500 ui.status(_("assuming destination %s\n") % dest)
500 ui.status(_("assuming destination %s\n") % dest)
501
501
502 destc = convertsink(ui, dest, opts.get('dest_type'))
502 destc = convertsink(ui, dest, opts.get('dest_type'))
503
503
504 try:
504 try:
505 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
505 srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
506 opts.get('rev'))
506 opts.get('rev'))
507 except Exception:
507 except Exception:
508 for path in destc.created:
508 for path in destc.created:
509 shutil.rmtree(path, True)
509 shutil.rmtree(path, True)
510 raise
510 raise
511
511
512 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
512 sortmodes = ('branchsort', 'datesort', 'sourcesort', 'closesort')
513 sortmode = [m for m in sortmodes if opts.get(m)]
513 sortmode = [m for m in sortmodes if opts.get(m)]
514 if len(sortmode) > 1:
514 if len(sortmode) > 1:
515 raise util.Abort(_('more than one sort mode specified'))
515 raise util.Abort(_('more than one sort mode specified'))
516 sortmode = sortmode and sortmode[0] or defaultsort
516 sortmode = sortmode and sortmode[0] or defaultsort
517 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
517 if sortmode == 'sourcesort' and not srcc.hasnativeorder():
518 raise util.Abort(_('--sourcesort is not supported by this data source'))
518 raise util.Abort(_('--sourcesort is not supported by this data source'))
519 if sortmode == 'closesort' and not srcc.hasnativeclose():
519 if sortmode == 'closesort' and not srcc.hasnativeclose():
520 raise util.Abort(_('--closesort is not supported by this data source'))
520 raise util.Abort(_('--closesort is not supported by this data source'))
521
521
522 fmap = opts.get('filemap')
522 fmap = opts.get('filemap')
523 if fmap:
523 if fmap:
524 srcc = filemap.filemap_source(ui, srcc, fmap)
524 srcc = filemap.filemap_source(ui, srcc, fmap)
525 destc.setfilemapmode(True)
525 destc.setfilemapmode(True)
526
526
527 if not revmapfile:
527 if not revmapfile:
528 revmapfile = destc.revmapfile()
528 revmapfile = destc.revmapfile()
529
529
530 c = converter(ui, srcc, destc, revmapfile, opts)
530 c = converter(ui, srcc, destc, revmapfile, opts)
531 c.convert(sortmode)
531 c.convert(sortmode)
532
532
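For orientation, a sketch of how the new option is expected to be combined with --filemap (the repository paths and filemap name are placeholders):

  $ hg convert --filemap filemap.txt --full source-repo dest-repo-hg

With --full, each revision converted in this run is built from a complete file listing rather than only the files it touched, so filemap changes are applied to every file in those revisions.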
@@ -1,275 +1,277 b''
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
1 # cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os, re, socket, errno
8 import os, re, socket, errno
9 from cStringIO import StringIO
9 from cStringIO import StringIO
10 from mercurial import encoding, util
10 from mercurial import encoding, util
11 from mercurial.i18n import _
11 from mercurial.i18n import _
12
12
13 from common import NoRepo, commit, converter_source, checktool
13 from common import NoRepo, commit, converter_source, checktool
14 from common import makedatetimestamp
14 from common import makedatetimestamp
15 import cvsps
15 import cvsps
16
16
17 class convert_cvs(converter_source):
17 class convert_cvs(converter_source):
18 def __init__(self, ui, path, rev=None):
18 def __init__(self, ui, path, rev=None):
19 super(convert_cvs, self).__init__(ui, path, rev=rev)
19 super(convert_cvs, self).__init__(ui, path, rev=rev)
20
20
21 cvs = os.path.join(path, "CVS")
21 cvs = os.path.join(path, "CVS")
22 if not os.path.exists(cvs):
22 if not os.path.exists(cvs):
23 raise NoRepo(_("%s does not look like a CVS checkout") % path)
23 raise NoRepo(_("%s does not look like a CVS checkout") % path)
24
24
25 checktool('cvs')
25 checktool('cvs')
26
26
27 self.changeset = None
27 self.changeset = None
28 self.files = {}
28 self.files = {}
29 self.tags = {}
29 self.tags = {}
30 self.lastbranch = {}
30 self.lastbranch = {}
31 self.socket = None
31 self.socket = None
32 self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
32 self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
33 self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
33 self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
34 self.encoding = encoding.encoding
34 self.encoding = encoding.encoding
35
35
36 self._connect()
36 self._connect()
37
37
38 def _parse(self):
38 def _parse(self):
39 if self.changeset is not None:
39 if self.changeset is not None:
40 return
40 return
41 self.changeset = {}
41 self.changeset = {}
42
42
43 maxrev = 0
43 maxrev = 0
44 if self.rev:
44 if self.rev:
45 # TODO: handle tags
45 # TODO: handle tags
46 try:
46 try:
47 # patchset number?
47 # patchset number?
48 maxrev = int(self.rev)
48 maxrev = int(self.rev)
49 except ValueError:
49 except ValueError:
50 raise util.Abort(_('revision %s is not a patchset number')
50 raise util.Abort(_('revision %s is not a patchset number')
51 % self.rev)
51 % self.rev)
52
52
53 d = os.getcwd()
53 d = os.getcwd()
54 try:
54 try:
55 os.chdir(self.path)
55 os.chdir(self.path)
56 id = None
56 id = None
57
57
58 cache = 'update'
58 cache = 'update'
59 if not self.ui.configbool('convert', 'cvsps.cache', True):
59 if not self.ui.configbool('convert', 'cvsps.cache', True):
60 cache = None
60 cache = None
61 db = cvsps.createlog(self.ui, cache=cache)
61 db = cvsps.createlog(self.ui, cache=cache)
62 db = cvsps.createchangeset(self.ui, db,
62 db = cvsps.createchangeset(self.ui, db,
63 fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
63 fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
64 mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
64 mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
65 mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
65 mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
66
66
67 for cs in db:
67 for cs in db:
68 if maxrev and cs.id > maxrev:
68 if maxrev and cs.id > maxrev:
69 break
69 break
70 id = str(cs.id)
70 id = str(cs.id)
71 cs.author = self.recode(cs.author)
71 cs.author = self.recode(cs.author)
72 self.lastbranch[cs.branch] = id
72 self.lastbranch[cs.branch] = id
73 cs.comment = self.recode(cs.comment)
73 cs.comment = self.recode(cs.comment)
74 if self.ui.configbool('convert', 'localtimezone'):
74 if self.ui.configbool('convert', 'localtimezone'):
75 cs.date = makedatetimestamp(cs.date[0])
75 cs.date = makedatetimestamp(cs.date[0])
76 date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
76 date = util.datestr(cs.date, '%Y-%m-%d %H:%M:%S %1%2')
77 self.tags.update(dict.fromkeys(cs.tags, id))
77 self.tags.update(dict.fromkeys(cs.tags, id))
78
78
79 files = {}
79 files = {}
80 for f in cs.entries:
80 for f in cs.entries:
81 files[f.file] = "%s%s" % ('.'.join([str(x)
81 files[f.file] = "%s%s" % ('.'.join([str(x)
82 for x in f.revision]),
82 for x in f.revision]),
83 ['', '(DEAD)'][f.dead])
83 ['', '(DEAD)'][f.dead])
84
84
85 # add current commit to set
85 # add current commit to set
86 c = commit(author=cs.author, date=date,
86 c = commit(author=cs.author, date=date,
87 parents=[str(p.id) for p in cs.parents],
87 parents=[str(p.id) for p in cs.parents],
88 desc=cs.comment, branch=cs.branch or '')
88 desc=cs.comment, branch=cs.branch or '')
89 self.changeset[id] = c
89 self.changeset[id] = c
90 self.files[id] = files
90 self.files[id] = files
91
91
92 self.heads = self.lastbranch.values()
92 self.heads = self.lastbranch.values()
93 finally:
93 finally:
94 os.chdir(d)
94 os.chdir(d)
95
95
96 def _connect(self):
96 def _connect(self):
97 root = self.cvsroot
97 root = self.cvsroot
98 conntype = None
98 conntype = None
99 user, host = None, None
99 user, host = None, None
100 cmd = ['cvs', 'server']
100 cmd = ['cvs', 'server']
101
101
102 self.ui.status(_("connecting to %s\n") % root)
102 self.ui.status(_("connecting to %s\n") % root)
103
103
104 if root.startswith(":pserver:"):
104 if root.startswith(":pserver:"):
105 root = root[9:]
105 root = root[9:]
106 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
106 m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
107 root)
107 root)
108 if m:
108 if m:
109 conntype = "pserver"
109 conntype = "pserver"
110 user, passw, serv, port, root = m.groups()
110 user, passw, serv, port, root = m.groups()
111 if not user:
111 if not user:
112 user = "anonymous"
112 user = "anonymous"
113 if not port:
113 if not port:
114 port = 2401
114 port = 2401
115 else:
115 else:
116 port = int(port)
116 port = int(port)
117 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
117 format0 = ":pserver:%s@%s:%s" % (user, serv, root)
118 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
118 format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
119
119
120 if not passw:
120 if not passw:
121 passw = "A"
121 passw = "A"
122 cvspass = os.path.expanduser("~/.cvspass")
122 cvspass = os.path.expanduser("~/.cvspass")
123 try:
123 try:
124 pf = open(cvspass)
124 pf = open(cvspass)
125 for line in pf.read().splitlines():
125 for line in pf.read().splitlines():
126 part1, part2 = line.split(' ', 1)
126 part1, part2 = line.split(' ', 1)
127 # /1 :pserver:user@example.com:2401/cvsroot/foo
127 # /1 :pserver:user@example.com:2401/cvsroot/foo
128 # Ah<Z
128 # Ah<Z
129 if part1 == '/1':
129 if part1 == '/1':
130 part1, part2 = part2.split(' ', 1)
130 part1, part2 = part2.split(' ', 1)
131 format = format1
131 format = format1
132 # :pserver:user@example.com:/cvsroot/foo Ah<Z
132 # :pserver:user@example.com:/cvsroot/foo Ah<Z
133 else:
133 else:
134 format = format0
134 format = format0
135 if part1 == format:
135 if part1 == format:
136 passw = part2
136 passw = part2
137 break
137 break
138 pf.close()
138 pf.close()
139 except IOError, inst:
139 except IOError, inst:
140 if inst.errno != errno.ENOENT:
140 if inst.errno != errno.ENOENT:
141 if not getattr(inst, 'filename', None):
141 if not getattr(inst, 'filename', None):
142 inst.filename = cvspass
142 inst.filename = cvspass
143 raise
143 raise
144
144
145 sck = socket.socket()
145 sck = socket.socket()
146 sck.connect((serv, port))
146 sck.connect((serv, port))
147 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
147 sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
148 "END AUTH REQUEST", ""]))
148 "END AUTH REQUEST", ""]))
149 if sck.recv(128) != "I LOVE YOU\n":
149 if sck.recv(128) != "I LOVE YOU\n":
150 raise util.Abort(_("CVS pserver authentication failed"))
150 raise util.Abort(_("CVS pserver authentication failed"))
151
151
152 self.writep = self.readp = sck.makefile('r+')
152 self.writep = self.readp = sck.makefile('r+')
153
153
154 if not conntype and root.startswith(":local:"):
154 if not conntype and root.startswith(":local:"):
155 conntype = "local"
155 conntype = "local"
156 root = root[7:]
156 root = root[7:]
157
157
158 if not conntype:
158 if not conntype:
159 # :ext:user@host/home/user/path/to/cvsroot
159 # :ext:user@host/home/user/path/to/cvsroot
160 if root.startswith(":ext:"):
160 if root.startswith(":ext:"):
161 root = root[5:]
161 root = root[5:]
162 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
162 m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
163 # Do not take a Windows path "c:\foo\bar" for a connection string
163 # Do not take a Windows path "c:\foo\bar" for a connection string
164 if os.path.isdir(root) or not m:
164 if os.path.isdir(root) or not m:
165 conntype = "local"
165 conntype = "local"
166 else:
166 else:
167 conntype = "rsh"
167 conntype = "rsh"
168 user, host, root = m.group(1), m.group(2), m.group(3)
168 user, host, root = m.group(1), m.group(2), m.group(3)
169
169
170 if conntype != "pserver":
170 if conntype != "pserver":
171 if conntype == "rsh":
171 if conntype == "rsh":
172 rsh = os.environ.get("CVS_RSH") or "ssh"
172 rsh = os.environ.get("CVS_RSH") or "ssh"
173 if user:
173 if user:
174 cmd = [rsh, '-l', user, host] + cmd
174 cmd = [rsh, '-l', user, host] + cmd
175 else:
175 else:
176 cmd = [rsh, host] + cmd
176 cmd = [rsh, host] + cmd
177
177
178 # popen2 does not support argument lists under Windows
178 # popen2 does not support argument lists under Windows
179 cmd = [util.shellquote(arg) for arg in cmd]
179 cmd = [util.shellquote(arg) for arg in cmd]
180 cmd = util.quotecommand(' '.join(cmd))
180 cmd = util.quotecommand(' '.join(cmd))
181 self.writep, self.readp = util.popen2(cmd)
181 self.writep, self.readp = util.popen2(cmd)
182
182
183 self.realroot = root
183 self.realroot = root
184
184
185 self.writep.write("Root %s\n" % root)
185 self.writep.write("Root %s\n" % root)
186 self.writep.write("Valid-responses ok error Valid-requests Mode"
186 self.writep.write("Valid-responses ok error Valid-requests Mode"
187 " M Mbinary E Checked-in Created Updated"
187 " M Mbinary E Checked-in Created Updated"
188 " Merged Removed\n")
188 " Merged Removed\n")
189 self.writep.write("valid-requests\n")
189 self.writep.write("valid-requests\n")
190 self.writep.flush()
190 self.writep.flush()
191 r = self.readp.readline()
191 r = self.readp.readline()
192 if not r.startswith("Valid-requests"):
192 if not r.startswith("Valid-requests"):
193 raise util.Abort(_('unexpected response from CVS server '
193 raise util.Abort(_('unexpected response from CVS server '
194 '(expected "Valid-requests", but got %r)')
194 '(expected "Valid-requests", but got %r)')
195 % r)
195 % r)
196 if "UseUnchanged" in r:
196 if "UseUnchanged" in r:
197 self.writep.write("UseUnchanged\n")
197 self.writep.write("UseUnchanged\n")
198 self.writep.flush()
198 self.writep.flush()
199 r = self.readp.readline()
199 r = self.readp.readline()
200
200
201 def getheads(self):
201 def getheads(self):
202 self._parse()
202 self._parse()
203 return self.heads
203 return self.heads
204
204
205 def getfile(self, name, rev):
205 def getfile(self, name, rev):
206
206
207 def chunkedread(fp, count):
207 def chunkedread(fp, count):
208 # file-objects returned by socket.makefile() do not handle
208 # file-objects returned by socket.makefile() do not handle
209 # large read() requests very well.
209 # large read() requests very well.
210 chunksize = 65536
210 chunksize = 65536
211 output = StringIO()
211 output = StringIO()
212 while count > 0:
212 while count > 0:
213 data = fp.read(min(count, chunksize))
213 data = fp.read(min(count, chunksize))
214 if not data:
214 if not data:
215 raise util.Abort(_("%d bytes missing from remote file")
215 raise util.Abort(_("%d bytes missing from remote file")
216 % count)
216 % count)
217 count -= len(data)
217 count -= len(data)
218 output.write(data)
218 output.write(data)
219 return output.getvalue()
219 return output.getvalue()
220
220
221 self._parse()
221 self._parse()
222 if rev.endswith("(DEAD)"):
222 if rev.endswith("(DEAD)"):
223 return None, None
223 return None, None
224
224
225 args = ("-N -P -kk -r %s --" % rev).split()
225 args = ("-N -P -kk -r %s --" % rev).split()
226 args.append(self.cvsrepo + '/' + name)
226 args.append(self.cvsrepo + '/' + name)
227 for x in args:
227 for x in args:
228 self.writep.write("Argument %s\n" % x)
228 self.writep.write("Argument %s\n" % x)
229 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
229 self.writep.write("Directory .\n%s\nco\n" % self.realroot)
230 self.writep.flush()
230 self.writep.flush()
231
231
232 data = ""
232 data = ""
233 mode = None
233 mode = None
234 while True:
234 while True:
235 line = self.readp.readline()
235 line = self.readp.readline()
236 if line.startswith("Created ") or line.startswith("Updated "):
236 if line.startswith("Created ") or line.startswith("Updated "):
237 self.readp.readline() # path
237 self.readp.readline() # path
238 self.readp.readline() # entries
238 self.readp.readline() # entries
239 mode = self.readp.readline()[:-1]
239 mode = self.readp.readline()[:-1]
240 count = int(self.readp.readline()[:-1])
240 count = int(self.readp.readline()[:-1])
241 data = chunkedread(self.readp, count)
241 data = chunkedread(self.readp, count)
242 elif line.startswith(" "):
242 elif line.startswith(" "):
243 data += line[1:]
243 data += line[1:]
244 elif line.startswith("M "):
244 elif line.startswith("M "):
245 pass
245 pass
246 elif line.startswith("Mbinary "):
246 elif line.startswith("Mbinary "):
247 count = int(self.readp.readline()[:-1])
247 count = int(self.readp.readline()[:-1])
248 data = chunkedread(self.readp, count)
248 data = chunkedread(self.readp, count)
249 else:
249 else:
250 if line == "ok\n":
250 if line == "ok\n":
251 if mode is None:
251 if mode is None:
252 raise util.Abort(_('malformed response from CVS'))
252 raise util.Abort(_('malformed response from CVS'))
253 return (data, "x" in mode and "x" or "")
253 return (data, "x" in mode and "x" or "")
254 elif line.startswith("E "):
254 elif line.startswith("E "):
255 self.ui.warn(_("cvs server: %s\n") % line[2:])
255 self.ui.warn(_("cvs server: %s\n") % line[2:])
256 elif line.startswith("Remove"):
256 elif line.startswith("Remove"):
257 self.readp.readline()
257 self.readp.readline()
258 else:
258 else:
259 raise util.Abort(_("unknown CVS response: %s") % line)
259 raise util.Abort(_("unknown CVS response: %s") % line)
260
260
261 def getchanges(self, rev):
261 def getchanges(self, rev, full):
262 if full:
263 raise util.Abort(_("convert from cvs does not support --full"))
262 self._parse()
264 self._parse()
263 return sorted(self.files[rev].iteritems()), {}
265 return sorted(self.files[rev].iteritems()), {}
264
266
265 def getcommit(self, rev):
267 def getcommit(self, rev):
266 self._parse()
268 self._parse()
267 return self.changeset[rev]
269 return self.changeset[rev]
268
270
269 def gettags(self):
271 def gettags(self):
270 self._parse()
272 self._parse()
271 return self.tags
273 return self.tags
272
274
273 def getchangedfiles(self, rev, i):
275 def getchangedfiles(self, rev, i):
274 self._parse()
276 self._parse()
275 return sorted(self.files[rev])
277 return sorted(self.files[rev])
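The cvs source hunk ends here. The pattern it applies repeats for every converter source in this changeset: getchanges() grows a "full" parameter, and a source that cannot cheaply re-enumerate every file of a revision simply refuses it. Below is a minimal sketch of that contract with an invented source class; "toysource" and its "files" attribute are illustrative only and not Mercurial API (the real sources raise util.Abort rather than NotImplementedError).

    class toysource(object):
        """Illustrative converter source, not part of Mercurial."""
        def __init__(self, files):
            self.files = files  # rev -> [(path, fileid), ...]

        def getchanges(self, rev, full):
            if full:
                # cvs and darcs abort at this point; a plain exception
                # keeps the sketch free of Mercurial imports.
                raise NotImplementedError("this source does not support --full")
            return sorted(self.files[rev]), {}  # (changed files, copies)

    src = toysource({'1': [('a.txt', '1.1')]})
    assert src.getchanges('1', full=False) == ([('a.txt', '1.1')], {})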
@@ -1,206 +1,208 b''
1 # darcs.py - darcs support for the convert extension
1 # darcs.py - darcs support for the convert extension
2 #
2 #
3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from common import NoRepo, checktool, commandline, commit, converter_source
8 from common import NoRepo, checktool, commandline, commit, converter_source
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial import util
10 from mercurial import util
11 import os, shutil, tempfile, re, errno
11 import os, shutil, tempfile, re, errno
12
12
13 # The naming drift of ElementTree is fun!
13 # The naming drift of ElementTree is fun!
14
14
15 try:
15 try:
16 from xml.etree.cElementTree import ElementTree, XMLParser
16 from xml.etree.cElementTree import ElementTree, XMLParser
17 except ImportError:
17 except ImportError:
18 try:
18 try:
19 from xml.etree.ElementTree import ElementTree, XMLParser
19 from xml.etree.ElementTree import ElementTree, XMLParser
20 except ImportError:
20 except ImportError:
21 try:
21 try:
22 from elementtree.cElementTree import ElementTree, XMLParser
22 from elementtree.cElementTree import ElementTree, XMLParser
23 except ImportError:
23 except ImportError:
24 try:
24 try:
25 from elementtree.ElementTree import ElementTree, XMLParser
25 from elementtree.ElementTree import ElementTree, XMLParser
26 except ImportError:
26 except ImportError:
27 pass
27 pass
28
28
29 class darcs_source(converter_source, commandline):
29 class darcs_source(converter_source, commandline):
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 converter_source.__init__(self, ui, path, rev=rev)
31 converter_source.__init__(self, ui, path, rev=rev)
32 commandline.__init__(self, ui, 'darcs')
32 commandline.__init__(self, ui, 'darcs')
33
33
34 # check for _darcs, ElementTree so that we can easily skip
34 # check for _darcs, ElementTree so that we can easily skip
35 # test-convert-darcs if ElementTree is not around
35 # test-convert-darcs if ElementTree is not around
36 if not os.path.exists(os.path.join(path, '_darcs')):
36 if not os.path.exists(os.path.join(path, '_darcs')):
37 raise NoRepo(_("%s does not look like a darcs repository") % path)
37 raise NoRepo(_("%s does not look like a darcs repository") % path)
38
38
39 checktool('darcs')
39 checktool('darcs')
40 version = self.run0('--version').splitlines()[0].strip()
40 version = self.run0('--version').splitlines()[0].strip()
41 if version < '2.1':
41 if version < '2.1':
42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
42 raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
43 version)
43 version)
44
44
45 if "ElementTree" not in globals():
45 if "ElementTree" not in globals():
46 raise util.Abort(_("Python ElementTree module is not available"))
46 raise util.Abort(_("Python ElementTree module is not available"))
47
47
48 self.path = os.path.realpath(path)
48 self.path = os.path.realpath(path)
49
49
50 self.lastrev = None
50 self.lastrev = None
51 self.changes = {}
51 self.changes = {}
52 self.parents = {}
52 self.parents = {}
53 self.tags = {}
53 self.tags = {}
54
54
55 # Check darcs repository format
55 # Check darcs repository format
56 format = self.format()
56 format = self.format()
57 if format:
57 if format:
58 if format in ('darcs-1.0', 'hashed'):
58 if format in ('darcs-1.0', 'hashed'):
59 raise NoRepo(_("%s repository format is unsupported, "
59 raise NoRepo(_("%s repository format is unsupported, "
60 "please upgrade") % format)
60 "please upgrade") % format)
61 else:
61 else:
62 self.ui.warn(_('failed to detect repository format!'))
62 self.ui.warn(_('failed to detect repository format!'))
63
63
64 def before(self):
64 def before(self):
65 self.tmppath = tempfile.mkdtemp(
65 self.tmppath = tempfile.mkdtemp(
66 prefix='convert-' + os.path.basename(self.path) + '-')
66 prefix='convert-' + os.path.basename(self.path) + '-')
67 output, status = self.run('init', repodir=self.tmppath)
67 output, status = self.run('init', repodir=self.tmppath)
68 self.checkexit(status)
68 self.checkexit(status)
69
69
70 tree = self.xml('changes', xml_output=True, summary=True,
70 tree = self.xml('changes', xml_output=True, summary=True,
71 repodir=self.path)
71 repodir=self.path)
72 tagname = None
72 tagname = None
73 child = None
73 child = None
74 for elt in tree.findall('patch'):
74 for elt in tree.findall('patch'):
75 node = elt.get('hash')
75 node = elt.get('hash')
76 name = elt.findtext('name', '')
76 name = elt.findtext('name', '')
77 if name.startswith('TAG '):
77 if name.startswith('TAG '):
78 tagname = name[4:].strip()
78 tagname = name[4:].strip()
79 elif tagname is not None:
79 elif tagname is not None:
80 self.tags[tagname] = node
80 self.tags[tagname] = node
81 tagname = None
81 tagname = None
82 self.changes[node] = elt
82 self.changes[node] = elt
83 self.parents[child] = [node]
83 self.parents[child] = [node]
84 child = node
84 child = node
85 self.parents[child] = []
85 self.parents[child] = []
86
86
87 def after(self):
87 def after(self):
88 self.ui.debug('cleaning up %s\n' % self.tmppath)
88 self.ui.debug('cleaning up %s\n' % self.tmppath)
89 shutil.rmtree(self.tmppath, ignore_errors=True)
89 shutil.rmtree(self.tmppath, ignore_errors=True)
90
90
91 def recode(self, s, encoding=None):
91 def recode(self, s, encoding=None):
92 if isinstance(s, unicode):
92 if isinstance(s, unicode):
93 # XMLParser returns unicode objects for anything it can't
93 # XMLParser returns unicode objects for anything it can't
94 # encode into ASCII. We convert them back to str to get
94 # encode into ASCII. We convert them back to str to get
95 # recode's normal conversion behavior.
95 # recode's normal conversion behavior.
96 s = s.encode('latin-1')
96 s = s.encode('latin-1')
97 return super(darcs_source, self).recode(s, encoding)
97 return super(darcs_source, self).recode(s, encoding)
98
98
99 def xml(self, cmd, **kwargs):
99 def xml(self, cmd, **kwargs):
100 # NOTE: darcs is currently encoding agnostic and will print
100 # NOTE: darcs is currently encoding agnostic and will print
101 # patch metadata byte-for-byte, even in the XML changelog.
101 # patch metadata byte-for-byte, even in the XML changelog.
102 etree = ElementTree()
102 etree = ElementTree()
103 # While we are decoding the XML as latin-1 to be as liberal as
103 # While we are decoding the XML as latin-1 to be as liberal as
104 # possible, etree will still raise an exception if any
104 # possible, etree will still raise an exception if any
105 # non-printable characters are in the XML changelog.
105 # non-printable characters are in the XML changelog.
106 parser = XMLParser(encoding='latin-1')
106 parser = XMLParser(encoding='latin-1')
107 p = self._run(cmd, **kwargs)
107 p = self._run(cmd, **kwargs)
108 etree.parse(p.stdout, parser=parser)
108 etree.parse(p.stdout, parser=parser)
109 p.wait()
109 p.wait()
110 self.checkexit(p.returncode)
110 self.checkexit(p.returncode)
111 return etree.getroot()
111 return etree.getroot()
112
112
113 def format(self):
113 def format(self):
114 output, status = self.run('show', 'repo', no_files=True,
114 output, status = self.run('show', 'repo', no_files=True,
115 repodir=self.path)
115 repodir=self.path)
116 self.checkexit(status)
116 self.checkexit(status)
117 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
117 m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
118 if not m:
118 if not m:
119 return None
119 return None
120 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
120 return ','.join(sorted(f.strip() for f in m.group(1).split(',')))
121
121
122 def manifest(self):
122 def manifest(self):
123 man = []
123 man = []
124 output, status = self.run('show', 'files', no_directories=True,
124 output, status = self.run('show', 'files', no_directories=True,
125 repodir=self.tmppath)
125 repodir=self.tmppath)
126 self.checkexit(status)
126 self.checkexit(status)
127 for line in output.split('\n'):
127 for line in output.split('\n'):
128 path = line[2:]
128 path = line[2:]
129 if path:
129 if path:
130 man.append(path)
130 man.append(path)
131 return man
131 return man
132
132
133 def getheads(self):
133 def getheads(self):
134 return self.parents[None]
134 return self.parents[None]
135
135
136 def getcommit(self, rev):
136 def getcommit(self, rev):
137 elt = self.changes[rev]
137 elt = self.changes[rev]
138 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
138 date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
139 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
139 desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
140 # etree can return unicode objects for name, comment, and author,
140 # etree can return unicode objects for name, comment, and author,
141 # so recode() is used to ensure str objects are emitted.
141 # so recode() is used to ensure str objects are emitted.
142 return commit(author=self.recode(elt.get('author')),
142 return commit(author=self.recode(elt.get('author')),
143 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
143 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
144 desc=self.recode(desc).strip(),
144 desc=self.recode(desc).strip(),
145 parents=self.parents[rev])
145 parents=self.parents[rev])
146
146
147 def pull(self, rev):
147 def pull(self, rev):
148 output, status = self.run('pull', self.path, all=True,
148 output, status = self.run('pull', self.path, all=True,
149 match='hash %s' % rev,
149 match='hash %s' % rev,
150 no_test=True, no_posthook=True,
150 no_test=True, no_posthook=True,
151 external_merge='/bin/false',
151 external_merge='/bin/false',
152 repodir=self.tmppath)
152 repodir=self.tmppath)
153 if status:
153 if status:
154 if output.find('We have conflicts in') == -1:
154 if output.find('We have conflicts in') == -1:
155 self.checkexit(status, output)
155 self.checkexit(status, output)
156 output, status = self.run('revert', all=True, repodir=self.tmppath)
156 output, status = self.run('revert', all=True, repodir=self.tmppath)
157 self.checkexit(status, output)
157 self.checkexit(status, output)
158
158
159 def getchanges(self, rev):
159 def getchanges(self, rev, full):
160 if full:
161 raise util.Abort(_("convert from darcs does not support --full"))
160 copies = {}
162 copies = {}
161 changes = []
163 changes = []
162 man = None
164 man = None
163 for elt in self.changes[rev].find('summary').getchildren():
165 for elt in self.changes[rev].find('summary').getchildren():
164 if elt.tag in ('add_directory', 'remove_directory'):
166 if elt.tag in ('add_directory', 'remove_directory'):
165 continue
167 continue
166 if elt.tag == 'move':
168 if elt.tag == 'move':
167 if man is None:
169 if man is None:
168 man = self.manifest()
170 man = self.manifest()
169 source, dest = elt.get('from'), elt.get('to')
171 source, dest = elt.get('from'), elt.get('to')
170 if source in man:
172 if source in man:
171 # File move
173 # File move
172 changes.append((source, rev))
174 changes.append((source, rev))
173 changes.append((dest, rev))
175 changes.append((dest, rev))
174 copies[dest] = source
176 copies[dest] = source
175 else:
177 else:
176 # Directory move, deduce file moves from manifest
178 # Directory move, deduce file moves from manifest
177 source = source + '/'
179 source = source + '/'
178 for f in man:
180 for f in man:
179 if not f.startswith(source):
181 if not f.startswith(source):
180 continue
182 continue
181 fdest = dest + '/' + f[len(source):]
183 fdest = dest + '/' + f[len(source):]
182 changes.append((f, rev))
184 changes.append((f, rev))
183 changes.append((fdest, rev))
185 changes.append((fdest, rev))
184 copies[fdest] = f
186 copies[fdest] = f
185 else:
187 else:
186 changes.append((elt.text.strip(), rev))
188 changes.append((elt.text.strip(), rev))
187 self.pull(rev)
189 self.pull(rev)
188 self.lastrev = rev
190 self.lastrev = rev
189 return sorted(changes), copies
191 return sorted(changes), copies
190
192
191 def getfile(self, name, rev):
193 def getfile(self, name, rev):
192 if rev != self.lastrev:
194 if rev != self.lastrev:
193 raise util.Abort(_('internal calling inconsistency'))
195 raise util.Abort(_('internal calling inconsistency'))
194 path = os.path.join(self.tmppath, name)
196 path = os.path.join(self.tmppath, name)
195 try:
197 try:
196 data = util.readfile(path)
198 data = util.readfile(path)
197 mode = os.lstat(path).st_mode
199 mode = os.lstat(path).st_mode
198 except IOError, inst:
200 except IOError, inst:
199 if inst.errno == errno.ENOENT:
201 if inst.errno == errno.ENOENT:
200 return None, None
202 return None, None
201 raise
203 raise
202 mode = (mode & 0111) and 'x' or ''
204 mode = (mode & 0111) and 'x' or ''
203 return data, mode
205 return data, mode
204
206
205 def gettags(self):
207 def gettags(self):
206 return self.tags
208 return self.tags
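One detail of the darcs getchanges() above is worth spelling out: darcs records directory moves, but Mercurial tracks only files, so a move whose source is not in the manifest is treated as a directory move and expanded into one copy record per file underneath it. A stand-alone sketch of that expansion follows; the sample manifest is invented.

    def expand_dirmove(man, source, dest):
        # man: file paths in the revision; source/dest: moved directory names
        copies = {}
        prefix = source + '/'
        for f in man:
            if f.startswith(prefix):
                copies[dest + '/' + f[len(prefix):]] = f
        return copies

    man = ['lib/a.py', 'lib/sub/b.py', 'README']
    assert expand_dirmove(man, 'lib', 'src') == {
        'src/a.py': 'lib/a.py',
        'src/sub/b.py': 'lib/sub/b.py',
    }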
@@ -1,422 +1,422 b''
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
1 # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
2 # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
3 #
3 #
4 # This software may be used and distributed according to the terms of the
4 # This software may be used and distributed according to the terms of the
5 # GNU General Public License version 2 or any later version.
5 # GNU General Public License version 2 or any later version.
6
6
7 import posixpath
7 import posixpath
8 import shlex
8 import shlex
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10 from mercurial import util, error
10 from mercurial import util, error
11 from common import SKIPREV, converter_source
11 from common import SKIPREV, converter_source
12
12
13 def rpairs(path):
13 def rpairs(path):
14 '''Yield tuples with path split at '/', starting with the full path.
14 '''Yield tuples with path split at '/', starting with the full path.
15 No leading, trailing or double '/', please.
15 No leading, trailing or double '/', please.
16 >>> for x in rpairs('foo/bar/baz'): print x
16 >>> for x in rpairs('foo/bar/baz'): print x
17 ('foo/bar/baz', '')
17 ('foo/bar/baz', '')
18 ('foo/bar', 'baz')
18 ('foo/bar', 'baz')
19 ('foo', 'bar/baz')
19 ('foo', 'bar/baz')
20 ('.', 'foo/bar/baz')
20 ('.', 'foo/bar/baz')
21 '''
21 '''
22 i = len(path)
22 i = len(path)
23 while i != -1:
23 while i != -1:
24 yield path[:i], path[i + 1:]
24 yield path[:i], path[i + 1:]
25 i = path.rfind('/', 0, i)
25 i = path.rfind('/', 0, i)
26 yield '.', path
26 yield '.', path
27
27
28 def normalize(path):
28 def normalize(path):
29 ''' We use posixpath.normpath to support cross-platform path format.
29 ''' We use posixpath.normpath to support cross-platform path format.
30 However, it doesn't handle None input. So we wrap it up. '''
30 However, it doesn't handle None input. So we wrap it up. '''
31 if path is None:
31 if path is None:
32 return None
32 return None
33 return posixpath.normpath(path)
33 return posixpath.normpath(path)
34
34
35 class filemapper(object):
35 class filemapper(object):
36 '''Map and filter filenames when importing.
36 '''Map and filter filenames when importing.
37 A name can be mapped to itself, a new name, or None (omit from new
37 A name can be mapped to itself, a new name, or None (omit from new
38 repository).'''
38 repository).'''
39
39
40 def __init__(self, ui, path=None):
40 def __init__(self, ui, path=None):
41 self.ui = ui
41 self.ui = ui
42 self.include = {}
42 self.include = {}
43 self.exclude = {}
43 self.exclude = {}
44 self.rename = {}
44 self.rename = {}
45 if path:
45 if path:
46 if self.parse(path):
46 if self.parse(path):
47 raise util.Abort(_('errors in filemap'))
47 raise util.Abort(_('errors in filemap'))
48
48
49 def parse(self, path):
49 def parse(self, path):
50 errs = 0
50 errs = 0
51 def check(name, mapping, listname):
51 def check(name, mapping, listname):
52 if not name:
52 if not name:
53 self.ui.warn(_('%s:%d: path to %s is missing\n') %
53 self.ui.warn(_('%s:%d: path to %s is missing\n') %
54 (lex.infile, lex.lineno, listname))
54 (lex.infile, lex.lineno, listname))
55 return 1
55 return 1
56 if name in mapping:
56 if name in mapping:
57 self.ui.warn(_('%s:%d: %r already in %s list\n') %
57 self.ui.warn(_('%s:%d: %r already in %s list\n') %
58 (lex.infile, lex.lineno, name, listname))
58 (lex.infile, lex.lineno, name, listname))
59 return 1
59 return 1
60 if (name.startswith('/') or
60 if (name.startswith('/') or
61 name.endswith('/') or
61 name.endswith('/') or
62 '//' in name):
62 '//' in name):
63 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
63 self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
64 (lex.infile, lex.lineno, listname, name))
64 (lex.infile, lex.lineno, listname, name))
65 return 1
65 return 1
66 return 0
66 return 0
67 lex = shlex.shlex(open(path), path, True)
67 lex = shlex.shlex(open(path), path, True)
68 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
68 lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
69 cmd = lex.get_token()
69 cmd = lex.get_token()
70 while cmd:
70 while cmd:
71 if cmd == 'include':
71 if cmd == 'include':
72 name = normalize(lex.get_token())
72 name = normalize(lex.get_token())
73 errs += check(name, self.exclude, 'exclude')
73 errs += check(name, self.exclude, 'exclude')
74 self.include[name] = name
74 self.include[name] = name
75 elif cmd == 'exclude':
75 elif cmd == 'exclude':
76 name = normalize(lex.get_token())
76 name = normalize(lex.get_token())
77 errs += check(name, self.include, 'include')
77 errs += check(name, self.include, 'include')
78 errs += check(name, self.rename, 'rename')
78 errs += check(name, self.rename, 'rename')
79 self.exclude[name] = name
79 self.exclude[name] = name
80 elif cmd == 'rename':
80 elif cmd == 'rename':
81 src = normalize(lex.get_token())
81 src = normalize(lex.get_token())
82 dest = normalize(lex.get_token())
82 dest = normalize(lex.get_token())
83 errs += check(src, self.exclude, 'exclude')
83 errs += check(src, self.exclude, 'exclude')
84 self.rename[src] = dest
84 self.rename[src] = dest
85 elif cmd == 'source':
85 elif cmd == 'source':
86 errs += self.parse(normalize(lex.get_token()))
86 errs += self.parse(normalize(lex.get_token()))
87 else:
87 else:
88 self.ui.warn(_('%s:%d: unknown directive %r\n') %
88 self.ui.warn(_('%s:%d: unknown directive %r\n') %
89 (lex.infile, lex.lineno, cmd))
89 (lex.infile, lex.lineno, cmd))
90 errs += 1
90 errs += 1
91 cmd = lex.get_token()
91 cmd = lex.get_token()
92 return errs
92 return errs
93
93
94 def lookup(self, name, mapping):
94 def lookup(self, name, mapping):
95 name = normalize(name)
95 name = normalize(name)
96 for pre, suf in rpairs(name):
96 for pre, suf in rpairs(name):
97 try:
97 try:
98 return mapping[pre], pre, suf
98 return mapping[pre], pre, suf
99 except KeyError:
99 except KeyError:
100 pass
100 pass
101 return '', name, ''
101 return '', name, ''
102
102
103 def __call__(self, name):
103 def __call__(self, name):
104 if self.include:
104 if self.include:
105 inc = self.lookup(name, self.include)[0]
105 inc = self.lookup(name, self.include)[0]
106 else:
106 else:
107 inc = name
107 inc = name
108 if self.exclude:
108 if self.exclude:
109 exc = self.lookup(name, self.exclude)[0]
109 exc = self.lookup(name, self.exclude)[0]
110 else:
110 else:
111 exc = ''
111 exc = ''
112 if (not self.include and exc) or (len(inc) <= len(exc)):
112 if (not self.include and exc) or (len(inc) <= len(exc)):
113 return None
113 return None
114 newpre, pre, suf = self.lookup(name, self.rename)
114 newpre, pre, suf = self.lookup(name, self.rename)
115 if newpre:
115 if newpre:
116 if newpre == '.':
116 if newpre == '.':
117 return suf
117 return suf
118 if suf:
118 if suf:
119 if newpre.endswith('/'):
119 if newpre.endswith('/'):
120 return newpre + suf
120 return newpre + suf
121 return newpre + '/' + suf
121 return newpre + '/' + suf
122 return newpre
122 return newpre
123 return name
123 return name
124
124
125 def active(self):
125 def active(self):
126 return bool(self.include or self.exclude or self.rename)
126 return bool(self.include or self.exclude or self.rename)
127
127
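# A quick, self-contained illustration (not part of the patch) of how the
# filemapper above resolves paths: lookup() does longest-prefix matching via
# rpairs(), and __call__() combines the include, exclude and rename maps.
# This sketch covers only the exclude and rename maps; the include handling
# and the '.'/trailing-'/' cases of the real __call__() are left out. The
# directory names are invented.

def rpairs(path):
    i = len(path)
    while i != -1:
        yield path[:i], path[i + 1:]
        i = path.rfind('/', 0, i)
    yield '.', path

def lookup(name, mapping):
    for pre, suf in rpairs(name):
        if pre in mapping:
            return mapping[pre], pre, suf
    return '', name, ''

exclude = {'docs': 'docs'}  # drop everything under docs/
rename = {'lib': 'src'}     # rename lib/ to src/

def mapname(name):
    if lookup(name, exclude)[0]:
        return None  # omitted from the new repository
    newpre, pre, suf = lookup(name, rename)
    if newpre:
        return newpre + '/' + suf if suf else newpre
    return name  # kept unchanged

assert mapname('lib/util.py') == 'src/util.py'
assert mapname('docs/index.txt') is None
assert mapname('README') == 'README'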
128 # This class does two additional things compared to a regular source:
128 # This class does two additional things compared to a regular source:
129 #
129 #
130 # - Filter and rename files. This is mostly wrapped by the filemapper
130 # - Filter and rename files. This is mostly wrapped by the filemapper
131 # class above. We hide the original filename in the revision that is
131 # class above. We hide the original filename in the revision that is
132 # returned by getchanges to be able to find things later in getfile.
132 # returned by getchanges to be able to find things later in getfile.
133 #
133 #
134 # - Return only revisions that matter for the files we're interested in.
134 # - Return only revisions that matter for the files we're interested in.
135 # This involves rewriting the parents of the original revision to
135 # This involves rewriting the parents of the original revision to
136 # create a graph that is restricted to those revisions.
136 # create a graph that is restricted to those revisions.
137 #
137 #
138 # This set of revisions includes not only revisions that directly
138 # This set of revisions includes not only revisions that directly
139 # touch files we're interested in, but also merges that merge two
139 # touch files we're interested in, but also merges that merge two
140 # or more interesting revisions.
140 # or more interesting revisions.
141
141
142 class filemap_source(converter_source):
142 class filemap_source(converter_source):
143 def __init__(self, ui, baseconverter, filemap):
143 def __init__(self, ui, baseconverter, filemap):
144 super(filemap_source, self).__init__(ui)
144 super(filemap_source, self).__init__(ui)
145 self.base = baseconverter
145 self.base = baseconverter
146 self.filemapper = filemapper(ui, filemap)
146 self.filemapper = filemapper(ui, filemap)
147 self.commits = {}
147 self.commits = {}
148 # if a revision rev has parent p in the original revision graph, then
148 # if a revision rev has parent p in the original revision graph, then
149 # rev will have parent self.parentmap[p] in the restricted graph.
149 # rev will have parent self.parentmap[p] in the restricted graph.
150 self.parentmap = {}
150 self.parentmap = {}
151 # self.wantedancestors[rev] is the set of all ancestors of rev that
151 # self.wantedancestors[rev] is the set of all ancestors of rev that
152 # are in the restricted graph.
152 # are in the restricted graph.
153 self.wantedancestors = {}
153 self.wantedancestors = {}
154 self.convertedorder = None
154 self.convertedorder = None
155 self._rebuilt = False
155 self._rebuilt = False
156 self.origparents = {}
156 self.origparents = {}
157 self.children = {}
157 self.children = {}
158 self.seenchildren = {}
158 self.seenchildren = {}
159
159
160 def before(self):
160 def before(self):
161 self.base.before()
161 self.base.before()
162
162
163 def after(self):
163 def after(self):
164 self.base.after()
164 self.base.after()
165
165
166 def setrevmap(self, revmap):
166 def setrevmap(self, revmap):
167 # rebuild our state to make things restartable
167 # rebuild our state to make things restartable
168 #
168 #
169 # To avoid calling getcommit for every revision that has already
169 # To avoid calling getcommit for every revision that has already
170 # been converted, we rebuild only the parentmap, delaying the
170 # been converted, we rebuild only the parentmap, delaying the
171 # rebuild of wantedancestors until we need it (i.e. until a
171 # rebuild of wantedancestors until we need it (i.e. until a
172 # merge).
172 # merge).
173 #
173 #
174 # We assume the order argument lists the revisions in
174 # We assume the order argument lists the revisions in
175 # topological order, so that we can infer which revisions were
175 # topological order, so that we can infer which revisions were
176 # wanted by previous runs.
176 # wanted by previous runs.
177 self._rebuilt = not revmap
177 self._rebuilt = not revmap
178 seen = {SKIPREV: SKIPREV}
178 seen = {SKIPREV: SKIPREV}
179 dummyset = set()
179 dummyset = set()
180 converted = []
180 converted = []
181 for rev in revmap.order:
181 for rev in revmap.order:
182 mapped = revmap[rev]
182 mapped = revmap[rev]
183 wanted = mapped not in seen
183 wanted = mapped not in seen
184 if wanted:
184 if wanted:
185 seen[mapped] = rev
185 seen[mapped] = rev
186 self.parentmap[rev] = rev
186 self.parentmap[rev] = rev
187 else:
187 else:
188 self.parentmap[rev] = seen[mapped]
188 self.parentmap[rev] = seen[mapped]
189 self.wantedancestors[rev] = dummyset
189 self.wantedancestors[rev] = dummyset
190 arg = seen[mapped]
190 arg = seen[mapped]
191 if arg == SKIPREV:
191 if arg == SKIPREV:
192 arg = None
192 arg = None
193 converted.append((rev, wanted, arg))
193 converted.append((rev, wanted, arg))
194 self.convertedorder = converted
194 self.convertedorder = converted
195 return self.base.setrevmap(revmap)
195 return self.base.setrevmap(revmap)
196
196
197 def rebuild(self):
197 def rebuild(self):
198 if self._rebuilt:
198 if self._rebuilt:
199 return True
199 return True
200 self._rebuilt = True
200 self._rebuilt = True
201 self.parentmap.clear()
201 self.parentmap.clear()
202 self.wantedancestors.clear()
202 self.wantedancestors.clear()
203 self.seenchildren.clear()
203 self.seenchildren.clear()
204 for rev, wanted, arg in self.convertedorder:
204 for rev, wanted, arg in self.convertedorder:
205 if rev not in self.origparents:
205 if rev not in self.origparents:
206 try:
206 try:
207 self.origparents[rev] = self.getcommit(rev).parents
207 self.origparents[rev] = self.getcommit(rev).parents
208 except error.RepoLookupError:
208 except error.RepoLookupError:
209 self.ui.debug("unknown revmap source: %s\n" % rev)
209 self.ui.debug("unknown revmap source: %s\n" % rev)
210 continue
210 continue
211 if arg is not None:
211 if arg is not None:
212 self.children[arg] = self.children.get(arg, 0) + 1
212 self.children[arg] = self.children.get(arg, 0) + 1
213
213
214 for rev, wanted, arg in self.convertedorder:
214 for rev, wanted, arg in self.convertedorder:
215 try:
215 try:
216 parents = self.origparents[rev]
216 parents = self.origparents[rev]
217 except KeyError:
217 except KeyError:
218 continue # unknown revmap source
218 continue # unknown revmap source
219 if wanted:
219 if wanted:
220 self.mark_wanted(rev, parents)
220 self.mark_wanted(rev, parents)
221 else:
221 else:
222 self.mark_not_wanted(rev, arg)
222 self.mark_not_wanted(rev, arg)
223 self._discard(arg, *parents)
223 self._discard(arg, *parents)
224
224
225 return True
225 return True
226
226
227 def getheads(self):
227 def getheads(self):
228 return self.base.getheads()
228 return self.base.getheads()
229
229
230 def getcommit(self, rev):
230 def getcommit(self, rev):
231 # We want to save a reference to the commit objects to be able
231 # We want to save a reference to the commit objects to be able
232 # to rewrite their parents later on.
232 # to rewrite their parents later on.
233 c = self.commits[rev] = self.base.getcommit(rev)
233 c = self.commits[rev] = self.base.getcommit(rev)
234 for p in c.parents:
234 for p in c.parents:
235 self.children[p] = self.children.get(p, 0) + 1
235 self.children[p] = self.children.get(p, 0) + 1
236 return c
236 return c
237
237
238 def _cachedcommit(self, rev):
238 def _cachedcommit(self, rev):
239 if rev in self.commits:
239 if rev in self.commits:
240 return self.commits[rev]
240 return self.commits[rev]
241 return self.base.getcommit(rev)
241 return self.base.getcommit(rev)
242
242
243 def _discard(self, *revs):
243 def _discard(self, *revs):
244 for r in revs:
244 for r in revs:
245 if r is None:
245 if r is None:
246 continue
246 continue
247 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
247 self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
248 if self.seenchildren[r] == self.children[r]:
248 if self.seenchildren[r] == self.children[r]:
249 self.wantedancestors.pop(r, None)
249 self.wantedancestors.pop(r, None)
250 self.parentmap.pop(r, None)
250 self.parentmap.pop(r, None)
251 del self.seenchildren[r]
251 del self.seenchildren[r]
252 if self._rebuilt:
252 if self._rebuilt:
253 del self.children[r]
253 del self.children[r]
254
254
255 def wanted(self, rev, i):
255 def wanted(self, rev, i):
256 # Return True if we're directly interested in rev.
256 # Return True if we're directly interested in rev.
257 #
257 #
258 # i is an index selecting one of the parents of rev (if rev
258 # i is an index selecting one of the parents of rev (if rev
259 # has no parents, i is None). getchangedfiles will give us
259 # has no parents, i is None). getchangedfiles will give us
260 # the list of files that are different in rev and in the parent
260 # the list of files that are different in rev and in the parent
261 # indicated by i. If we're interested in any of these files,
261 # indicated by i. If we're interested in any of these files,
262 # we're interested in rev.
262 # we're interested in rev.
263 try:
263 try:
264 files = self.base.getchangedfiles(rev, i)
264 files = self.base.getchangedfiles(rev, i)
265 except NotImplementedError:
265 except NotImplementedError:
266 raise util.Abort(_("source repository doesn't support --filemap"))
266 raise util.Abort(_("source repository doesn't support --filemap"))
267 for f in files:
267 for f in files:
268 if self.filemapper(f):
268 if self.filemapper(f):
269 return True
269 return True
270 return False
270 return False
271
271
272 def mark_not_wanted(self, rev, p):
272 def mark_not_wanted(self, rev, p):
273 # Mark rev as not interesting and update data structures.
273 # Mark rev as not interesting and update data structures.
274
274
275 if p is None:
275 if p is None:
276 # A root revision. Use SKIPREV to indicate that it doesn't
276 # A root revision. Use SKIPREV to indicate that it doesn't
277 # map to any revision in the restricted graph. Put SKIPREV
277 # map to any revision in the restricted graph. Put SKIPREV
278 # in the set of wanted ancestors to simplify code elsewhere
278 # in the set of wanted ancestors to simplify code elsewhere
279 self.parentmap[rev] = SKIPREV
279 self.parentmap[rev] = SKIPREV
280 self.wantedancestors[rev] = set((SKIPREV,))
280 self.wantedancestors[rev] = set((SKIPREV,))
281 return
281 return
282
282
283 # Reuse the data from our parent.
283 # Reuse the data from our parent.
284 self.parentmap[rev] = self.parentmap[p]
284 self.parentmap[rev] = self.parentmap[p]
285 self.wantedancestors[rev] = self.wantedancestors[p]
285 self.wantedancestors[rev] = self.wantedancestors[p]
286
286
287 def mark_wanted(self, rev, parents):
287 def mark_wanted(self, rev, parents):
288 # Mark rev as wanted and update data structures.
288 # Mark rev as wanted and update data structures.
289
289
290 # rev will be in the restricted graph, so children of rev in
290 # rev will be in the restricted graph, so children of rev in
291 # the original graph should still have rev as a parent in the
291 # the original graph should still have rev as a parent in the
292 # restricted graph.
292 # restricted graph.
293 self.parentmap[rev] = rev
293 self.parentmap[rev] = rev
294
294
295 # The set of wanted ancestors of rev is the union of the sets
295 # The set of wanted ancestors of rev is the union of the sets
296 # of wanted ancestors of its parents. Plus rev itself.
296 # of wanted ancestors of its parents. Plus rev itself.
297 wrev = set()
297 wrev = set()
298 for p in parents:
298 for p in parents:
299 if p in self.wantedancestors:
299 if p in self.wantedancestors:
300 wrev.update(self.wantedancestors[p])
300 wrev.update(self.wantedancestors[p])
301 else:
301 else:
302 self.ui.warn(_('warning: %s parent %s is missing\n') %
302 self.ui.warn(_('warning: %s parent %s is missing\n') %
303 (rev, p))
303 (rev, p))
304 wrev.add(rev)
304 wrev.add(rev)
305 self.wantedancestors[rev] = wrev
305 self.wantedancestors[rev] = wrev
306
306
307 def getchanges(self, rev):
307 def getchanges(self, rev, full):
308 parents = self.commits[rev].parents
308 parents = self.commits[rev].parents
309 if len(parents) > 1:
309 if len(parents) > 1:
310 self.rebuild()
310 self.rebuild()
311
311
312 # To decide whether we're interested in rev we:
312 # To decide whether we're interested in rev we:
313 #
313 #
314 # - calculate what parents rev will have if it turns out we're
314 # - calculate what parents rev will have if it turns out we're
315 # interested in it. If it's going to have more than 1 parent,
315 # interested in it. If it's going to have more than 1 parent,
316 # we're interested in it.
316 # we're interested in it.
317 #
317 #
318 # - otherwise, we'll compare it with the single parent we found.
318 # - otherwise, we'll compare it with the single parent we found.
319 # If any of the files we're interested in is different in the
319 # If any of the files we're interested in is different in the
320 # two revisions, we're interested in rev.
320 # two revisions, we're interested in rev.
321
321
322 # A parent p is interesting if its mapped version (self.parentmap[p]):
322 # A parent p is interesting if its mapped version (self.parentmap[p]):
323 # - is not SKIPREV
323 # - is not SKIPREV
324 # - is still not in the list of parents (we don't want duplicates)
324 # - is still not in the list of parents (we don't want duplicates)
325 # - is not an ancestor of the mapped versions of the other parents or
325 # - is not an ancestor of the mapped versions of the other parents or
326 # there is no parent in the same branch as the current revision.
326 # there is no parent in the same branch as the current revision.
327 mparents = []
327 mparents = []
328 knownparents = set()
328 knownparents = set()
329 branch = self.commits[rev].branch
329 branch = self.commits[rev].branch
330 hasbranchparent = False
330 hasbranchparent = False
331 for i, p1 in enumerate(parents):
331 for i, p1 in enumerate(parents):
332 mp1 = self.parentmap[p1]
332 mp1 = self.parentmap[p1]
333 if mp1 == SKIPREV or mp1 in knownparents:
333 if mp1 == SKIPREV or mp1 in knownparents:
334 continue
334 continue
335 isancestor = util.any(p2 for p2 in parents
335 isancestor = util.any(p2 for p2 in parents
336 if p1 != p2 and mp1 != self.parentmap[p2]
336 if p1 != p2 and mp1 != self.parentmap[p2]
337 and mp1 in self.wantedancestors[p2])
337 and mp1 in self.wantedancestors[p2])
338 if not isancestor and not hasbranchparent and len(parents) > 1:
338 if not isancestor and not hasbranchparent and len(parents) > 1:
339 # This could be expensive, avoid unnecessary calls.
339 # This could be expensive, avoid unnecessary calls.
340 if self._cachedcommit(p1).branch == branch:
340 if self._cachedcommit(p1).branch == branch:
341 hasbranchparent = True
341 hasbranchparent = True
342 mparents.append((p1, mp1, i, isancestor))
342 mparents.append((p1, mp1, i, isancestor))
343 knownparents.add(mp1)
343 knownparents.add(mp1)
344 # Discard parents ancestors of other parents if there is a
344 # Discard parents ancestors of other parents if there is a
345 # non-ancestor one on the same branch as the current revision.
345 # non-ancestor one on the same branch as the current revision.
346 if hasbranchparent:
346 if hasbranchparent:
347 mparents = [p for p in mparents if not p[3]]
347 mparents = [p for p in mparents if not p[3]]
348 wp = None
348 wp = None
349 if mparents:
349 if mparents:
350 wp = max(p[2] for p in mparents)
350 wp = max(p[2] for p in mparents)
351 mparents = [p[1] for p in mparents]
351 mparents = [p[1] for p in mparents]
352 elif parents:
352 elif parents:
353 wp = 0
353 wp = 0
354
354
355 self.origparents[rev] = parents
355 self.origparents[rev] = parents
356
356
357 closed = False
357 closed = False
358 if 'close' in self.commits[rev].extra:
358 if 'close' in self.commits[rev].extra:
359 # A branch closing revision is only useful if one of its
359 # A branch closing revision is only useful if one of its
360 # parents belongs to the branch being closed
360 # parents belongs to the branch being closed
361 pbranches = [self._cachedcommit(p).branch for p in mparents]
361 pbranches = [self._cachedcommit(p).branch for p in mparents]
362 if branch in pbranches:
362 if branch in pbranches:
363 closed = True
363 closed = True
364
364
365 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
365 if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
366 # We don't want this revision.
366 # We don't want this revision.
367 # Update our state and tell the convert process to map this
367 # Update our state and tell the convert process to map this
368 # revision to the same revision its parent was mapped to.
368 # revision to the same revision its parent was mapped to.
369 p = None
369 p = None
370 if parents:
370 if parents:
371 p = parents[wp]
371 p = parents[wp]
372 self.mark_not_wanted(rev, p)
372 self.mark_not_wanted(rev, p)
373 self.convertedorder.append((rev, False, p))
373 self.convertedorder.append((rev, False, p))
374 self._discard(*parents)
374 self._discard(*parents)
375 return self.parentmap[rev]
375 return self.parentmap[rev]
376
376
377 # We want this revision.
377 # We want this revision.
378 # Rewrite the parents of the commit object
378 # Rewrite the parents of the commit object
379 self.commits[rev].parents = mparents
379 self.commits[rev].parents = mparents
380 self.mark_wanted(rev, parents)
380 self.mark_wanted(rev, parents)
381 self.convertedorder.append((rev, True, None))
381 self.convertedorder.append((rev, True, None))
382 self._discard(*parents)
382 self._discard(*parents)
383
383
384 # Get the real changes and do the filtering/mapping. To be
384 # Get the real changes and do the filtering/mapping. To be
385 # able to get the files later on in getfile, we hide the
385 # able to get the files later on in getfile, we hide the
386 # original filename in the rev part of the return value.
386 # original filename in the rev part of the return value.
387 changes, copies = self.base.getchanges(rev)
387 changes, copies = self.base.getchanges(rev, full)
388 files = {}
388 files = {}
389 for f, r in changes:
389 for f, r in changes:
390 newf = self.filemapper(f)
390 newf = self.filemapper(f)
391 if newf and (newf != f or newf not in files):
391 if newf and (newf != f or newf not in files):
392 files[newf] = (f, r)
392 files[newf] = (f, r)
393 files = sorted(files.items())
393 files = sorted(files.items())
394
394
395 ncopies = {}
395 ncopies = {}
396 for c in copies:
396 for c in copies:
397 newc = self.filemapper(c)
397 newc = self.filemapper(c)
398 if newc:
398 if newc:
399 newsource = self.filemapper(copies[c])
399 newsource = self.filemapper(copies[c])
400 if newsource:
400 if newsource:
401 ncopies[newc] = newsource
401 ncopies[newc] = newsource
402
402
403 return files, ncopies
403 return files, ncopies
404
404
405 def getfile(self, name, rev):
405 def getfile(self, name, rev):
406 realname, realrev = rev
406 realname, realrev = rev
407 return self.base.getfile(realname, realrev)
407 return self.base.getfile(realname, realrev)
408
408
409 def gettags(self):
409 def gettags(self):
410 return self.base.gettags()
410 return self.base.gettags()
411
411
412 def hasnativeorder(self):
412 def hasnativeorder(self):
413 return self.base.hasnativeorder()
413 return self.base.hasnativeorder()
414
414
415 def lookuprev(self, rev):
415 def lookuprev(self, rev):
416 return self.base.lookuprev(rev)
416 return self.base.lookuprev(rev)
417
417
418 def getbookmarks(self):
418 def getbookmarks(self):
419 return self.base.getbookmarks()
419 return self.base.getbookmarks()
420
420
421 def converted(self, rev, sinkrev):
421 def converted(self, rev, sinkrev):
422 self.base.converted(rev, sinkrev)
422 self.base.converted(rev, sinkrev)
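That concludes the filemap.py hunk. The central data structure of filemap_source is parentmap: a revision kept by the filemap maps to itself, while a skipped revision maps to whatever its parent was mapped to, so dropped revisions collapse onto their nearest kept ancestor (and skipped roots onto SKIPREV). A toy reconstruction of that mapping for a linear history follows; the revision names and the "wanted" set are invented, and the real code additionally tracks wantedancestors and handles merges.

    SKIPREV = 'skip'

    def build_parentmap(order, parents, wanted):
        # order: revisions in topological order; parents: rev -> parent or None
        parentmap = {}
        for rev in order:
            if rev in wanted:
                parentmap[rev] = rev
            elif parents[rev] is None:
                parentmap[rev] = SKIPREV
            else:
                parentmap[rev] = parentmap[parents[rev]]
        return parentmap

    order = ['r0', 'r1', 'r2', 'r3']
    parents = {'r0': None, 'r1': 'r0', 'r2': 'r1', 'r3': 'r2'}
    # only r1 and r3 touch files selected by the filemap
    assert build_parentmap(order, parents, set(['r1', 'r3'])) == {
        'r0': SKIPREV, 'r1': 'r1', 'r2': 'r1', 'r3': 'r3'}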
@@ -1,343 +1,345 b''
1 # git.py - git support for the convert extension
1 # git.py - git support for the convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 import os
8 import os
9 import subprocess
9 import subprocess
10 from mercurial import util, config
10 from mercurial import util, config
11 from mercurial.node import hex, nullid
11 from mercurial.node import hex, nullid
12 from mercurial.i18n import _
12 from mercurial.i18n import _
13
13
14 from common import NoRepo, commit, converter_source, checktool
14 from common import NoRepo, commit, converter_source, checktool
15
15
16 class submodule(object):
16 class submodule(object):
17 def __init__(self, path, node, url):
17 def __init__(self, path, node, url):
18 self.path = path
18 self.path = path
19 self.node = node
19 self.node = node
20 self.url = url
20 self.url = url
21
21
22 def hgsub(self):
22 def hgsub(self):
23 return "%s = [git]%s" % (self.path, self.url)
23 return "%s = [git]%s" % (self.path, self.url)
24
24
25 def hgsubstate(self):
25 def hgsubstate(self):
26 return "%s %s" % (self.node, self.path)
26 return "%s %s" % (self.node, self.path)
27
27
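# A minimal check (not part of the patch) of what the submodule helper above
# emits for Mercurial's subrepository files: one "path = [git]url" line per
# module for .hgsub and one "node path" line for .hgsubstate. The path, hash
# and URL are invented.

class _submodule(object):
    def __init__(self, path, node, url):
        self.path, self.node, self.url = path, node, url
    def hgsub(self):
        return "%s = [git]%s" % (self.path, self.url)
    def hgsubstate(self):
        return "%s %s" % (self.node, self.path)

m = _submodule('vendor/lib', 'a' * 40, 'git://example.com/lib.git')
assert m.hgsub() == 'vendor/lib = [git]git://example.com/lib.git'
assert m.hgsubstate() == 'a' * 40 + ' vendor/lib'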
28 class convert_git(converter_source):
28 class convert_git(converter_source):
29 # Windows does not support GIT_DIR= construct while other systems
29 # Windows does not support the GIT_DIR= construct while other systems
29 # Windows does not support the GIT_DIR= construct while other systems
30 # cannot remove an environment variable. Just assume none have
30 # cannot remove an environment variable. Just assume none have
31 # both issues.
32 if util.safehasattr(os, 'unsetenv'):
32 if util.safehasattr(os, 'unsetenv'):
33 def gitopen(self, s, err=None):
33 def gitopen(self, s, err=None):
34 prevgitdir = os.environ.get('GIT_DIR')
34 prevgitdir = os.environ.get('GIT_DIR')
35 os.environ['GIT_DIR'] = self.path
35 os.environ['GIT_DIR'] = self.path
36 try:
36 try:
37 if err == subprocess.PIPE:
37 if err == subprocess.PIPE:
38 (stdin, stdout, stderr) = util.popen3(s)
38 (stdin, stdout, stderr) = util.popen3(s)
39 return stdout
39 return stdout
40 elif err == subprocess.STDOUT:
40 elif err == subprocess.STDOUT:
41 return self.popen_with_stderr(s)
41 return self.popen_with_stderr(s)
42 else:
42 else:
43 return util.popen(s, 'rb')
43 return util.popen(s, 'rb')
44 finally:
44 finally:
45 if prevgitdir is None:
45 if prevgitdir is None:
46 del os.environ['GIT_DIR']
46 del os.environ['GIT_DIR']
47 else:
47 else:
48 os.environ['GIT_DIR'] = prevgitdir
48 os.environ['GIT_DIR'] = prevgitdir
49
49
50 def gitpipe(self, s):
50 def gitpipe(self, s):
51 prevgitdir = os.environ.get('GIT_DIR')
51 prevgitdir = os.environ.get('GIT_DIR')
52 os.environ['GIT_DIR'] = self.path
52 os.environ['GIT_DIR'] = self.path
53 try:
53 try:
54 return util.popen3(s)
54 return util.popen3(s)
55 finally:
55 finally:
56 if prevgitdir is None:
56 if prevgitdir is None:
57 del os.environ['GIT_DIR']
57 del os.environ['GIT_DIR']
58 else:
58 else:
59 os.environ['GIT_DIR'] = prevgitdir
59 os.environ['GIT_DIR'] = prevgitdir
60
60
61 else:
61 else:
62 def gitopen(self, s, err=None):
62 def gitopen(self, s, err=None):
63 if err == subprocess.PIPE:
63 if err == subprocess.PIPE:
64 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
64 (sin, so, se) = util.popen3('GIT_DIR=%s %s' % (self.path, s))
65 return so
65 return so
66 elif err == subprocess.STDOUT:
66 elif err == subprocess.STDOUT:
67 return self.popen_with_stderr(s)
67 return self.popen_with_stderr(s)
68 else:
68 else:
69 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
69 return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')
70
70
71 def gitpipe(self, s):
71 def gitpipe(self, s):
72 return util.popen3('GIT_DIR=%s %s' % (self.path, s))
72 return util.popen3('GIT_DIR=%s %s' % (self.path, s))
73
73
74 def popen_with_stderr(self, s):
74 def popen_with_stderr(self, s):
75 p = subprocess.Popen(s, shell=True, bufsize=-1,
75 p = subprocess.Popen(s, shell=True, bufsize=-1,
76 close_fds=util.closefds,
76 close_fds=util.closefds,
77 stdin=subprocess.PIPE,
77 stdin=subprocess.PIPE,
78 stdout=subprocess.PIPE,
78 stdout=subprocess.PIPE,
79 stderr=subprocess.STDOUT,
79 stderr=subprocess.STDOUT,
80 universal_newlines=False,
80 universal_newlines=False,
81 env=None)
81 env=None)
82 return p.stdout
82 return p.stdout
83
83
84 def gitread(self, s):
84 def gitread(self, s):
85 fh = self.gitopen(s)
85 fh = self.gitopen(s)
86 data = fh.read()
86 data = fh.read()
87 return data, fh.close()
87 return data, fh.close()
88
88
89 def __init__(self, ui, path, rev=None):
89 def __init__(self, ui, path, rev=None):
90 super(convert_git, self).__init__(ui, path, rev=rev)
90 super(convert_git, self).__init__(ui, path, rev=rev)
91
91
92 if os.path.isdir(path + "/.git"):
92 if os.path.isdir(path + "/.git"):
93 path += "/.git"
93 path += "/.git"
94 if not os.path.exists(path + "/objects"):
94 if not os.path.exists(path + "/objects"):
95 raise NoRepo(_("%s does not look like a Git repository") % path)
95 raise NoRepo(_("%s does not look like a Git repository") % path)
96
96
97 checktool('git', 'git')
97 checktool('git', 'git')
98
98
99 self.path = path
99 self.path = path
100 self.submodules = []
100 self.submodules = []
101
101
102 self.catfilepipe = self.gitpipe('git cat-file --batch')
102 self.catfilepipe = self.gitpipe('git cat-file --batch')
103
103
104 def after(self):
104 def after(self):
105 for f in self.catfilepipe:
105 for f in self.catfilepipe:
106 f.close()
106 f.close()
107
107
108 def getheads(self):
108 def getheads(self):
109 if not self.rev:
109 if not self.rev:
110 heads, ret = self.gitread('git rev-parse --branches --remotes')
110 heads, ret = self.gitread('git rev-parse --branches --remotes')
111 heads = heads.splitlines()
111 heads = heads.splitlines()
112 else:
112 else:
113 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
113 heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
114 heads = [heads[:-1]]
114 heads = [heads[:-1]]
115 if ret:
115 if ret:
116 raise util.Abort(_('cannot retrieve git heads'))
116 raise util.Abort(_('cannot retrieve git heads'))
117 return heads
117 return heads
118
118
119 def catfile(self, rev, type):
119 def catfile(self, rev, type):
120 if rev == hex(nullid):
120 if rev == hex(nullid):
121 raise IOError
121 raise IOError
122 self.catfilepipe[0].write(rev+'\n')
122 self.catfilepipe[0].write(rev+'\n')
123 self.catfilepipe[0].flush()
123 self.catfilepipe[0].flush()
124 info = self.catfilepipe[1].readline().split()
124 info = self.catfilepipe[1].readline().split()
125 if info[1] != type:
125 if info[1] != type:
126 raise util.Abort(_('cannot read %r object at %s') % (type, rev))
126 raise util.Abort(_('cannot read %r object at %s') % (type, rev))
127 size = int(info[2])
127 size = int(info[2])
128 data = self.catfilepipe[1].read(size)
128 data = self.catfilepipe[1].read(size)
129 if len(data) < size:
129 if len(data) < size:
130 raise util.Abort(_('cannot read %r object at %s: unexpected size')
130 raise util.Abort(_('cannot read %r object at %s: unexpected size')
131 % (type, rev))
131 % (type, rev))
132 # read the trailing newline
132 # read the trailing newline
133 self.catfilepipe[1].read(1)
133 self.catfilepipe[1].read(1)
134 return data
134 return data
135
135
136 def getfile(self, name, rev):
136 def getfile(self, name, rev):
137 if rev == hex(nullid):
137 if rev == hex(nullid):
138 return None, None
138 return None, None
139 if name == '.hgsub':
139 if name == '.hgsub':
140 data = '\n'.join([m.hgsub() for m in self.submoditer()])
140 data = '\n'.join([m.hgsub() for m in self.submoditer()])
141 mode = ''
141 mode = ''
142 elif name == '.hgsubstate':
142 elif name == '.hgsubstate':
143 data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
143 data = '\n'.join([m.hgsubstate() for m in self.submoditer()])
144 mode = ''
144 mode = ''
145 else:
145 else:
146 data = self.catfile(rev, "blob")
146 data = self.catfile(rev, "blob")
147 mode = self.modecache[(name, rev)]
147 mode = self.modecache[(name, rev)]
148 return data, mode
148 return data, mode
149
149
150 def submoditer(self):
150 def submoditer(self):
151 null = hex(nullid)
151 null = hex(nullid)
152 for m in sorted(self.submodules, key=lambda p: p.path):
152 for m in sorted(self.submodules, key=lambda p: p.path):
153 if m.node != null:
153 if m.node != null:
154 yield m
154 yield m
155
155
156 def parsegitmodules(self, content):
156 def parsegitmodules(self, content):
157 """Parse the formatted .gitmodules file, example file format:
157 """Parse the formatted .gitmodules file, example file format:
158 [submodule "sub"]\n
158 [submodule "sub"]\n
159 \tpath = sub\n
159 \tpath = sub\n
160 \turl = git://giturl\n
160 \turl = git://giturl\n
161 """
161 """
162 self.submodules = []
162 self.submodules = []
163 c = config.config()
163 c = config.config()
164 # Each item in .gitmodules starts with a \t that can't be parsed
164 # Each item in .gitmodules starts with a \t that can't be parsed
165 c.parse('.gitmodules', content.replace('\t',''))
165 c.parse('.gitmodules', content.replace('\t',''))
166 for sec in c.sections():
166 for sec in c.sections():
167 s = c[sec]
167 s = c[sec]
168 if 'url' in s and 'path' in s:
168 if 'url' in s and 'path' in s:
169 self.submodules.append(submodule(s['path'], '', s['url']))
169 self.submodules.append(submodule(s['path'], '', s['url']))
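# Illustrative note, not in the original source: for the docstring's
# example above, parsing leaves self.submodules holding a single entry
# roughly equivalent to submodule('sub', '', 'git://giturl'); the empty
# node field is filled in later by retrievegitmodules().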
170
170
171 def retrievegitmodules(self, version):
171 def retrievegitmodules(self, version):
172 modules, ret = self.gitread("git show %s:%s" % (version, '.gitmodules'))
172 modules, ret = self.gitread("git show %s:%s" % (version, '.gitmodules'))
173 if ret:
173 if ret:
174 raise util.Abort(_('cannot read submodules config file in %s') %
174 raise util.Abort(_('cannot read submodules config file in %s') %
175 version)
175 version)
176 self.parsegitmodules(modules)
176 self.parsegitmodules(modules)
177 for m in self.submodules:
177 for m in self.submodules:
178 node, ret = self.gitread("git rev-parse %s:%s" % (version, m.path))
178 node, ret = self.gitread("git rev-parse %s:%s" % (version, m.path))
179 if ret:
179 if ret:
180 continue
180 continue
181 m.node = node.strip()
181 m.node = node.strip()
182
182
183 def getchanges(self, version):
183 def getchanges(self, version, full):
184 if full:
185 raise util.Abort(_("convert from git does not support --full"))
184 self.modecache = {}
186 self.modecache = {}
185 fh = self.gitopen("git diff-tree -z --root -m -r %s" % version)
187 fh = self.gitopen("git diff-tree -z --root -m -r %s" % version)
186 changes = []
188 changes = []
187 seen = set()
189 seen = set()
188 entry = None
190 entry = None
189 subexists = False
191 subexists = False
190 subdeleted = False
192 subdeleted = False
191 for l in fh.read().split('\x00'):
193 for l in fh.read().split('\x00'):
192 if not entry:
194 if not entry:
193 if not l.startswith(':'):
195 if not l.startswith(':'):
194 continue
196 continue
195 entry = l
197 entry = l
196 continue
198 continue
197 f = l
199 f = l
198 if f not in seen:
200 if f not in seen:
199 seen.add(f)
201 seen.add(f)
200 entry = entry.split()
202 entry = entry.split()
201 h = entry[3]
203 h = entry[3]
202 p = (entry[1] == "100755")
204 p = (entry[1] == "100755")
203 s = (entry[1] == "120000")
205 s = (entry[1] == "120000")
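# Illustrative example, not in the original source: a raw diff-tree
# record such as ":100644 100755 <old-sha> <new-sha> M", followed by the
# NUL-separated path, splits so that entry[1] is the new mode, entry[3]
# the new blob sha (h) and entry[4] the one-letter status used below.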
204
206
205 if f == '.gitmodules':
207 if f == '.gitmodules':
206 subexists = True
208 subexists = True
207 if entry[4] == 'D':
209 if entry[4] == 'D':
208 subdeleted = True
210 subdeleted = True
209 changes.append(('.hgsub', hex(nullid)))
211 changes.append(('.hgsub', hex(nullid)))
210 else:
212 else:
211 changes.append(('.hgsub', ''))
213 changes.append(('.hgsub', ''))
212 elif entry[1] == '160000' or entry[0] == ':160000':
214 elif entry[1] == '160000' or entry[0] == ':160000':
213 subexists = True
215 subexists = True
214 else:
216 else:
215 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
217 self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
216 changes.append((f, h))
218 changes.append((f, h))
217 entry = None
219 entry = None
218 if fh.close():
220 if fh.close():
219 raise util.Abort(_('cannot read changes in %s') % version)
221 raise util.Abort(_('cannot read changes in %s') % version)
220
222
221 if subexists:
223 if subexists:
222 if subdeleted:
224 if subdeleted:
223 changes.append(('.hgsubstate', hex(nullid)))
225 changes.append(('.hgsubstate', hex(nullid)))
224 else:
226 else:
225 self.retrievegitmodules(version)
227 self.retrievegitmodules(version)
226 changes.append(('.hgsubstate', ''))
228 changes.append(('.hgsubstate', ''))
227 return (changes, {})
229 return (changes, {})
228
230
229 def getcommit(self, version):
231 def getcommit(self, version):
230 c = self.catfile(version, "commit") # read the commit hash
232 c = self.catfile(version, "commit") # read the commit hash
231 end = c.find("\n\n")
233 end = c.find("\n\n")
232 message = c[end + 2:]
234 message = c[end + 2:]
233 message = self.recode(message)
235 message = self.recode(message)
234 l = c[:end].splitlines()
236 l = c[:end].splitlines()
235 parents = []
237 parents = []
236 author = committer = None
238 author = committer = None
237 for e in l[1:]:
239 for e in l[1:]:
238 n, v = e.split(" ", 1)
240 n, v = e.split(" ", 1)
239 if n == "author":
241 if n == "author":
240 p = v.split()
242 p = v.split()
241 tm, tz = p[-2:]
243 tm, tz = p[-2:]
242 author = " ".join(p[:-2])
244 author = " ".join(p[:-2])
243 if author[0] == "<": author = author[1:-1]
245 if author[0] == "<": author = author[1:-1]
244 author = self.recode(author)
246 author = self.recode(author)
245 if n == "committer":
247 if n == "committer":
246 p = v.split()
248 p = v.split()
247 tm, tz = p[-2:]
249 tm, tz = p[-2:]
248 committer = " ".join(p[:-2])
250 committer = " ".join(p[:-2])
249 if committer[0] == "<": committer = committer[1:-1]
251 if committer[0] == "<": committer = committer[1:-1]
250 committer = self.recode(committer)
252 committer = self.recode(committer)
251 if n == "parent":
253 if n == "parent":
252 parents.append(v)
254 parents.append(v)
253
255
254 if committer and committer != author:
256 if committer and committer != author:
255 message += "\ncommitter: %s\n" % committer
257 message += "\ncommitter: %s\n" % committer
256 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
258 tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
257 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
259 tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
258 date = tm + " " + str(tz)
260 date = tm + " " + str(tz)
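# Worked example (illustrative, not in the original source): for a git
# offset of "+0200", tzs is "+1", tzh is "02" and tzm is "00", so tz
# becomes -(1 * (2 * 3600 + 0)) == -7200, i.e. seconds west of UTC as
# Mercurial expects, and date ends up as "<timestamp> -7200".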
259
261
260 c = commit(parents=parents, date=date, author=author, desc=message,
262 c = commit(parents=parents, date=date, author=author, desc=message,
261 rev=version)
263 rev=version)
262 return c
264 return c
263
265
264 def gettags(self):
266 def gettags(self):
265 tags = {}
267 tags = {}
266 alltags = {}
268 alltags = {}
267 fh = self.gitopen('git ls-remote --tags "%s"' % self.path,
269 fh = self.gitopen('git ls-remote --tags "%s"' % self.path,
268 err=subprocess.STDOUT)
270 err=subprocess.STDOUT)
269 prefix = 'refs/tags/'
271 prefix = 'refs/tags/'
270
272
271 # Build complete list of tags, both annotated and bare ones
273 # Build complete list of tags, both annotated and bare ones
272 for line in fh:
274 for line in fh:
273 line = line.strip()
275 line = line.strip()
274 if line.startswith("error:") or line.startswith("fatal:"):
276 if line.startswith("error:") or line.startswith("fatal:"):
275 raise util.Abort(_('cannot read tags from %s') % self.path)
277 raise util.Abort(_('cannot read tags from %s') % self.path)
276 node, tag = line.split(None, 1)
278 node, tag = line.split(None, 1)
277 if not tag.startswith(prefix):
279 if not tag.startswith(prefix):
278 continue
280 continue
279 alltags[tag[len(prefix):]] = node
281 alltags[tag[len(prefix):]] = node
280 if fh.close():
282 if fh.close():
281 raise util.Abort(_('cannot read tags from %s') % self.path)
283 raise util.Abort(_('cannot read tags from %s') % self.path)
282
284
283 # Filter out tag objects for annotated tag refs
285 # Filter out tag objects for annotated tag refs
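# Illustrative example, not in the original source: an annotated tag is
# reported twice by ls-remote, e.g. "refs/tags/v1.0" (the tag object)
# and "refs/tags/v1.0^{}" (the peeled commit); the peeled entry wins
# below, so tags['v1.0'] ends up pointing at the commit sha.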
284 for tag in alltags:
286 for tag in alltags:
285 if tag.endswith('^{}'):
287 if tag.endswith('^{}'):
286 tags[tag[:-3]] = alltags[tag]
288 tags[tag[:-3]] = alltags[tag]
287 else:
289 else:
288 if tag + '^{}' in alltags:
290 if tag + '^{}' in alltags:
289 continue
291 continue
290 else:
292 else:
291 tags[tag] = alltags[tag]
293 tags[tag] = alltags[tag]
292
294
293 return tags
295 return tags
294
296
295 def getchangedfiles(self, version, i):
297 def getchangedfiles(self, version, i):
296 changes = []
298 changes = []
297 if i is None:
299 if i is None:
298 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
300 fh = self.gitopen("git diff-tree --root -m -r %s" % version)
299 for l in fh:
301 for l in fh:
300 if "\t" not in l:
302 if "\t" not in l:
301 continue
303 continue
302 m, f = l[:-1].split("\t")
304 m, f = l[:-1].split("\t")
303 changes.append(f)
305 changes.append(f)
304 else:
306 else:
305 fh = self.gitopen('git diff-tree --name-only --root -r %s '
307 fh = self.gitopen('git diff-tree --name-only --root -r %s '
306 '"%s^%s" --' % (version, version, i + 1))
308 '"%s^%s" --' % (version, version, i + 1))
307 changes = [f.rstrip('\n') for f in fh]
309 changes = [f.rstrip('\n') for f in fh]
308 if fh.close():
310 if fh.close():
309 raise util.Abort(_('cannot read changes in %s') % version)
311 raise util.Abort(_('cannot read changes in %s') % version)
310
312
311 return changes
313 return changes
312
314
313 def getbookmarks(self):
315 def getbookmarks(self):
314 bookmarks = {}
316 bookmarks = {}
315
317
316 # Interesting references in git are prefixed
318 # Interesting references in git are prefixed
317 prefix = 'refs/heads/'
319 prefix = 'refs/heads/'
318 prefixlen = len(prefix)
320 prefixlen = len(prefix)
319
321
320 # the two ref-listing commands share the parsing loop below
322 # the two ref-listing commands share the parsing loop below
321 gitcmd = { 'remote/': 'git ls-remote --heads origin',
323 gitcmd = { 'remote/': 'git ls-remote --heads origin',
322 '': 'git show-ref'}
324 '': 'git show-ref'}
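# Illustrative note, not in the original source: heads fetched via
# "git ls-remote --heads origin" get a "remote/" prefix in the bookmark
# name, while refs from "git show-ref" keep the bare branch name; e.g.
# refs/heads/master on origin becomes the bookmark "remote/master".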
323
325
324 # Origin heads
326 # Origin heads
325 for reftype in gitcmd:
327 for reftype in gitcmd:
326 try:
328 try:
327 fh = self.gitopen(gitcmd[reftype], err=subprocess.PIPE)
329 fh = self.gitopen(gitcmd[reftype], err=subprocess.PIPE)
328 for line in fh:
330 for line in fh:
329 line = line.strip()
331 line = line.strip()
330 rev, name = line.split(None, 1)
332 rev, name = line.split(None, 1)
331 if not name.startswith(prefix):
333 if not name.startswith(prefix):
332 continue
334 continue
333 name = '%s%s' % (reftype, name[prefixlen:])
335 name = '%s%s' % (reftype, name[prefixlen:])
334 bookmarks[name] = rev
336 bookmarks[name] = rev
335 except Exception:
337 except Exception:
336 pass
338 pass
337
339
338 return bookmarks
340 return bookmarks
339
341
340 def checkrevformat(self, revstr, mapname='splicemap'):
342 def checkrevformat(self, revstr, mapname='splicemap'):
341 """ git revision string is a 40 byte hex """
343 """ git revision string is a 40 byte hex """
342 self.checkhexformat(revstr, mapname)
344 self.checkhexformat(revstr, mapname)
343
345
@@ -1,337 +1,339 b''
1 # gnuarch.py - GNU Arch support for the convert extension
1 # gnuarch.py - GNU Arch support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
3 # Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
4 # and others
4 # and others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 from common import NoRepo, commandline, commit, converter_source
9 from common import NoRepo, commandline, commit, converter_source
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11 from mercurial import encoding, util
11 from mercurial import encoding, util
12 import os, shutil, tempfile, stat
12 import os, shutil, tempfile, stat
13 from email.Parser import Parser
13 from email.Parser import Parser
14
14
15 class gnuarch_source(converter_source, commandline):
15 class gnuarch_source(converter_source, commandline):
16
16
17 class gnuarch_rev(object):
17 class gnuarch_rev(object):
18 def __init__(self, rev):
18 def __init__(self, rev):
19 self.rev = rev
19 self.rev = rev
20 self.summary = ''
20 self.summary = ''
21 self.date = None
21 self.date = None
22 self.author = ''
22 self.author = ''
23 self.continuationof = None
23 self.continuationof = None
24 self.add_files = []
24 self.add_files = []
25 self.mod_files = []
25 self.mod_files = []
26 self.del_files = []
26 self.del_files = []
27 self.ren_files = {}
27 self.ren_files = {}
28 self.ren_dirs = {}
28 self.ren_dirs = {}
29
29
30 def __init__(self, ui, path, rev=None):
30 def __init__(self, ui, path, rev=None):
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
31 super(gnuarch_source, self).__init__(ui, path, rev=rev)
32
32
33 if not os.path.exists(os.path.join(path, '{arch}')):
33 if not os.path.exists(os.path.join(path, '{arch}')):
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
34 raise NoRepo(_("%s does not look like a GNU Arch repository")
35 % path)
35 % path)
36
36
37 # Could use checktool, but we want to check for baz or tla.
37 # Could use checktool, but we want to check for baz or tla.
38 self.execmd = None
38 self.execmd = None
39 if util.findexe('baz'):
39 if util.findexe('baz'):
40 self.execmd = 'baz'
40 self.execmd = 'baz'
41 else:
41 else:
42 if util.findexe('tla'):
42 if util.findexe('tla'):
43 self.execmd = 'tla'
43 self.execmd = 'tla'
44 else:
44 else:
45 raise util.Abort(_('cannot find a GNU Arch tool'))
45 raise util.Abort(_('cannot find a GNU Arch tool'))
46
46
47 commandline.__init__(self, ui, self.execmd)
47 commandline.__init__(self, ui, self.execmd)
48
48
49 self.path = os.path.realpath(path)
49 self.path = os.path.realpath(path)
50 self.tmppath = None
50 self.tmppath = None
51
51
52 self.treeversion = None
52 self.treeversion = None
53 self.lastrev = None
53 self.lastrev = None
54 self.changes = {}
54 self.changes = {}
55 self.parents = {}
55 self.parents = {}
56 self.tags = {}
56 self.tags = {}
57 self.catlogparser = Parser()
57 self.catlogparser = Parser()
58 self.encoding = encoding.encoding
58 self.encoding = encoding.encoding
59 self.archives = []
59 self.archives = []
60
60
61 def before(self):
61 def before(self):
62 # Get registered archives
62 # Get registered archives
63 self.archives = [i.rstrip('\n')
63 self.archives = [i.rstrip('\n')
64 for i in self.runlines0('archives', '-n')]
64 for i in self.runlines0('archives', '-n')]
65
65
66 if self.execmd == 'tla':
66 if self.execmd == 'tla':
67 output = self.run0('tree-version', self.path)
67 output = self.run0('tree-version', self.path)
68 else:
68 else:
69 output = self.run0('tree-version', '-d', self.path)
69 output = self.run0('tree-version', '-d', self.path)
70 self.treeversion = output.strip()
70 self.treeversion = output.strip()
71
71
72 # Get name of temporary directory
72 # Get name of temporary directory
73 version = self.treeversion.split('/')
73 version = self.treeversion.split('/')
74 self.tmppath = os.path.join(tempfile.gettempdir(),
74 self.tmppath = os.path.join(tempfile.gettempdir(),
75 'hg-%s' % version[1])
75 'hg-%s' % version[1])
76
76
77 # Generate parents dictionary
77 # Generate parents dictionary
78 self.parents[None] = []
78 self.parents[None] = []
79 treeversion = self.treeversion
79 treeversion = self.treeversion
80 child = None
80 child = None
81 while treeversion:
81 while treeversion:
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
82 self.ui.status(_('analyzing tree version %s...\n') % treeversion)
83
83
84 archive = treeversion.split('/')[0]
84 archive = treeversion.split('/')[0]
85 if archive not in self.archives:
85 if archive not in self.archives:
86 self.ui.status(_('tree analysis stopped because it points to '
86 self.ui.status(_('tree analysis stopped because it points to '
87 'an unregistered archive %s...\n') % archive)
87 'an unregistered archive %s...\n') % archive)
88 break
88 break
89
89
90 # Get the complete list of revisions for that tree version
90 # Get the complete list of revisions for that tree version
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
91 output, status = self.runlines('revisions', '-r', '-f', treeversion)
92 self.checkexit(status, 'failed retrieving revisions for %s'
92 self.checkexit(status, 'failed retrieving revisions for %s'
93 % treeversion)
93 % treeversion)
94
94
95 # No new iteration unless a revision has a continuation-of header
95 # No new iteration unless a revision has a continuation-of header
96 treeversion = None
96 treeversion = None
97
97
98 for l in output:
98 for l in output:
99 rev = l.strip()
99 rev = l.strip()
100 self.changes[rev] = self.gnuarch_rev(rev)
100 self.changes[rev] = self.gnuarch_rev(rev)
101 self.parents[rev] = []
101 self.parents[rev] = []
102
102
103 # Read author, date and summary
103 # Read author, date and summary
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
104 catlog, status = self.run('cat-log', '-d', self.path, rev)
105 if status:
105 if status:
106 catlog = self.run0('cat-archive-log', rev)
106 catlog = self.run0('cat-archive-log', rev)
107 self._parsecatlog(catlog, rev)
107 self._parsecatlog(catlog, rev)
108
108
109 # Populate the parents map
109 # Populate the parents map
110 self.parents[child].append(rev)
110 self.parents[child].append(rev)
111
111
112 # Keep track of the current revision as the child of the next
112 # Keep track of the current revision as the child of the next
113 # revision scanned
113 # revision scanned
114 child = rev
114 child = rev
115
115
116 # Check if we have to follow the usual incremental history
116 # Check if we have to follow the usual incremental history
117 # or if we have to 'jump' to a different treeversion given
117 # or if we have to 'jump' to a different treeversion given
118 # by the continuation-of header.
118 # by the continuation-of header.
119 if self.changes[rev].continuationof:
119 if self.changes[rev].continuationof:
120 treeversion = '--'.join(
120 treeversion = '--'.join(
121 self.changes[rev].continuationof.split('--')[:-1])
121 self.changes[rev].continuationof.split('--')[:-1])
122 break
122 break
123
123
124 # If we reached a base-0 revision w/o any continuation-of
124 # If we reached a base-0 revision w/o any continuation-of
125 # header, it means the tree history ends here.
125 # header, it means the tree history ends here.
126 if rev[-6:] == 'base-0':
126 if rev[-6:] == 'base-0':
127 break
127 break
128
128
129 def after(self):
129 def after(self):
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
130 self.ui.debug('cleaning up %s\n' % self.tmppath)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
131 shutil.rmtree(self.tmppath, ignore_errors=True)
132
132
133 def getheads(self):
133 def getheads(self):
134 return self.parents[None]
134 return self.parents[None]
135
135
136 def getfile(self, name, rev):
136 def getfile(self, name, rev):
137 if rev != self.lastrev:
137 if rev != self.lastrev:
138 raise util.Abort(_('internal calling inconsistency'))
138 raise util.Abort(_('internal calling inconsistency'))
139
139
140 if not os.path.lexists(os.path.join(self.tmppath, name)):
140 if not os.path.lexists(os.path.join(self.tmppath, name)):
141 return None, None
141 return None, None
142
142
143 return self._getfile(name, rev)
143 return self._getfile(name, rev)
144
144
145 def getchanges(self, rev):
145 def getchanges(self, rev, full):
146 if full:
147 raise util.Abort(_("convert from arch does not support --full"))
146 self._update(rev)
148 self._update(rev)
147 changes = []
149 changes = []
148 copies = {}
150 copies = {}
149
151
150 for f in self.changes[rev].add_files:
152 for f in self.changes[rev].add_files:
151 changes.append((f, rev))
153 changes.append((f, rev))
152
154
153 for f in self.changes[rev].mod_files:
155 for f in self.changes[rev].mod_files:
154 changes.append((f, rev))
156 changes.append((f, rev))
155
157
156 for f in self.changes[rev].del_files:
158 for f in self.changes[rev].del_files:
157 changes.append((f, rev))
159 changes.append((f, rev))
158
160
159 for src in self.changes[rev].ren_files:
161 for src in self.changes[rev].ren_files:
160 to = self.changes[rev].ren_files[src]
162 to = self.changes[rev].ren_files[src]
161 changes.append((src, rev))
163 changes.append((src, rev))
162 changes.append((to, rev))
164 changes.append((to, rev))
163 copies[to] = src
165 copies[to] = src
164
166
165 for src in self.changes[rev].ren_dirs:
167 for src in self.changes[rev].ren_dirs:
166 to = self.changes[rev].ren_dirs[src]
168 to = self.changes[rev].ren_dirs[src]
167 chgs, cps = self._rendirchanges(src, to)
169 chgs, cps = self._rendirchanges(src, to)
168 changes += [(f, rev) for f in chgs]
170 changes += [(f, rev) for f in chgs]
169 copies.update(cps)
171 copies.update(cps)
170
172
171 self.lastrev = rev
173 self.lastrev = rev
172 return sorted(set(changes)), copies
174 return sorted(set(changes)), copies
173
175
174 def getcommit(self, rev):
176 def getcommit(self, rev):
175 changes = self.changes[rev]
177 changes = self.changes[rev]
176 return commit(author=changes.author, date=changes.date,
178 return commit(author=changes.author, date=changes.date,
177 desc=changes.summary, parents=self.parents[rev], rev=rev)
179 desc=changes.summary, parents=self.parents[rev], rev=rev)
178
180
179 def gettags(self):
181 def gettags(self):
180 return self.tags
182 return self.tags
181
183
182 def _execute(self, cmd, *args, **kwargs):
184 def _execute(self, cmd, *args, **kwargs):
183 cmdline = [self.execmd, cmd]
185 cmdline = [self.execmd, cmd]
184 cmdline += args
186 cmdline += args
185 cmdline = [util.shellquote(arg) for arg in cmdline]
187 cmdline = [util.shellquote(arg) for arg in cmdline]
186 cmdline += ['>', os.devnull, '2>', os.devnull]
188 cmdline += ['>', os.devnull, '2>', os.devnull]
187 cmdline = util.quotecommand(' '.join(cmdline))
189 cmdline = util.quotecommand(' '.join(cmdline))
188 self.ui.debug(cmdline, '\n')
190 self.ui.debug(cmdline, '\n')
189 return os.system(cmdline)
191 return os.system(cmdline)
190
192
191 def _update(self, rev):
193 def _update(self, rev):
192 self.ui.debug('applying revision %s...\n' % rev)
194 self.ui.debug('applying revision %s...\n' % rev)
193 changeset, status = self.runlines('replay', '-d', self.tmppath,
195 changeset, status = self.runlines('replay', '-d', self.tmppath,
194 rev)
196 rev)
195 if status:
197 if status:
196 # Something went wrong while merging (baz or tla
198 # Something went wrong while merging (baz or tla
197 # issue?), get latest revision and try from there
199 # issue?), get latest revision and try from there
198 shutil.rmtree(self.tmppath, ignore_errors=True)
200 shutil.rmtree(self.tmppath, ignore_errors=True)
199 self._obtainrevision(rev)
201 self._obtainrevision(rev)
200 else:
202 else:
201 old_rev = self.parents[rev][0]
203 old_rev = self.parents[rev][0]
202 self.ui.debug('computing changeset between %s and %s...\n'
204 self.ui.debug('computing changeset between %s and %s...\n'
203 % (old_rev, rev))
205 % (old_rev, rev))
204 self._parsechangeset(changeset, rev)
206 self._parsechangeset(changeset, rev)
205
207
206 def _getfile(self, name, rev):
208 def _getfile(self, name, rev):
207 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
209 mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
208 if stat.S_ISLNK(mode):
210 if stat.S_ISLNK(mode):
209 data = os.readlink(os.path.join(self.tmppath, name))
211 data = os.readlink(os.path.join(self.tmppath, name))
210 mode = mode and 'l' or ''
212 mode = mode and 'l' or ''
211 else:
213 else:
212 data = open(os.path.join(self.tmppath, name), 'rb').read()
214 data = open(os.path.join(self.tmppath, name), 'rb').read()
213 mode = (mode & 0111) and 'x' or ''
215 mode = (mode & 0111) and 'x' or ''
214 return data, mode
216 return data, mode
215
217
216 def _exclude(self, name):
218 def _exclude(self, name):
217 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
219 exclude = ['{arch}', '.arch-ids', '.arch-inventory']
218 for exc in exclude:
220 for exc in exclude:
219 if name.find(exc) != -1:
221 if name.find(exc) != -1:
220 return True
222 return True
221 return False
223 return False
222
224
223 def _readcontents(self, path):
225 def _readcontents(self, path):
224 files = []
226 files = []
225 contents = os.listdir(path)
227 contents = os.listdir(path)
226 while len(contents) > 0:
228 while len(contents) > 0:
227 c = contents.pop()
229 c = contents.pop()
228 p = os.path.join(path, c)
230 p = os.path.join(path, c)
229 # os.walk could be used, but here we avoid internal GNU
231 # os.walk could be used, but here we avoid internal GNU
230 # Arch files and directories, thus saving a lot of time.
232 # Arch files and directories, thus saving a lot of time.
231 if not self._exclude(p):
233 if not self._exclude(p):
232 if os.path.isdir(p):
234 if os.path.isdir(p):
233 contents += [os.path.join(c, f) for f in os.listdir(p)]
235 contents += [os.path.join(c, f) for f in os.listdir(p)]
234 else:
236 else:
235 files.append(c)
237 files.append(c)
236 return files
238 return files
237
239
238 def _rendirchanges(self, src, dest):
240 def _rendirchanges(self, src, dest):
239 changes = []
241 changes = []
240 copies = {}
242 copies = {}
241 files = self._readcontents(os.path.join(self.tmppath, dest))
243 files = self._readcontents(os.path.join(self.tmppath, dest))
242 for f in files:
244 for f in files:
243 s = os.path.join(src, f)
245 s = os.path.join(src, f)
244 d = os.path.join(dest, f)
246 d = os.path.join(dest, f)
245 changes.append(s)
247 changes.append(s)
246 changes.append(d)
248 changes.append(d)
247 copies[d] = s
249 copies[d] = s
248 return changes, copies
250 return changes, copies
249
251
250 def _obtainrevision(self, rev):
252 def _obtainrevision(self, rev):
251 self.ui.debug('obtaining revision %s...\n' % rev)
253 self.ui.debug('obtaining revision %s...\n' % rev)
252 output = self._execute('get', rev, self.tmppath)
254 output = self._execute('get', rev, self.tmppath)
253 self.checkexit(output)
255 self.checkexit(output)
254 self.ui.debug('analyzing revision %s...\n' % rev)
256 self.ui.debug('analyzing revision %s...\n' % rev)
255 files = self._readcontents(self.tmppath)
257 files = self._readcontents(self.tmppath)
256 self.changes[rev].add_files += files
258 self.changes[rev].add_files += files
257
259
258 def _stripbasepath(self, path):
260 def _stripbasepath(self, path):
259 if path.startswith('./'):
261 if path.startswith('./'):
260 return path[2:]
262 return path[2:]
261 return path
263 return path
262
264
263 def _parsecatlog(self, data, rev):
265 def _parsecatlog(self, data, rev):
264 try:
266 try:
265 catlog = self.catlogparser.parsestr(data)
267 catlog = self.catlogparser.parsestr(data)
266
268
267 # Commit date
269 # Commit date
268 self.changes[rev].date = util.datestr(
270 self.changes[rev].date = util.datestr(
269 util.strdate(catlog['Standard-date'],
271 util.strdate(catlog['Standard-date'],
270 '%Y-%m-%d %H:%M:%S'))
272 '%Y-%m-%d %H:%M:%S'))
271
273
272 # Commit author
274 # Commit author
273 self.changes[rev].author = self.recode(catlog['Creator'])
275 self.changes[rev].author = self.recode(catlog['Creator'])
274
276
275 # Commit description
277 # Commit description
276 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
278 self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
277 catlog.get_payload()))
279 catlog.get_payload()))
278 self.changes[rev].summary = self.recode(self.changes[rev].summary)
280 self.changes[rev].summary = self.recode(self.changes[rev].summary)
279
281
280 # Commit revision origin when dealing with a branch or tag
282 # Commit revision origin when dealing with a branch or tag
281 if 'Continuation-of' in catlog:
283 if 'Continuation-of' in catlog:
282 self.changes[rev].continuationof = self.recode(
284 self.changes[rev].continuationof = self.recode(
283 catlog['Continuation-of'])
285 catlog['Continuation-of'])
284 except Exception:
286 except Exception:
285 raise util.Abort(_('could not parse cat-log of %s') % rev)
287 raise util.Abort(_('could not parse cat-log of %s') % rev)
286
288
287 def _parsechangeset(self, data, rev):
289 def _parsechangeset(self, data, rev):
288 for l in data:
290 for l in data:
289 l = l.strip()
291 l = l.strip()
290 # Added file (ignore added directory)
292 # Added file (ignore added directory)
291 if l.startswith('A') and not l.startswith('A/'):
293 if l.startswith('A') and not l.startswith('A/'):
292 file = self._stripbasepath(l[1:].strip())
294 file = self._stripbasepath(l[1:].strip())
293 if not self._exclude(file):
295 if not self._exclude(file):
294 self.changes[rev].add_files.append(file)
296 self.changes[rev].add_files.append(file)
295 # Deleted file (ignore deleted directory)
297 # Deleted file (ignore deleted directory)
296 elif l.startswith('D') and not l.startswith('D/'):
298 elif l.startswith('D') and not l.startswith('D/'):
297 file = self._stripbasepath(l[1:].strip())
299 file = self._stripbasepath(l[1:].strip())
298 if not self._exclude(file):
300 if not self._exclude(file):
299 self.changes[rev].del_files.append(file)
301 self.changes[rev].del_files.append(file)
300 # Modified binary file
302 # Modified binary file
301 elif l.startswith('Mb'):
303 elif l.startswith('Mb'):
302 file = self._stripbasepath(l[2:].strip())
304 file = self._stripbasepath(l[2:].strip())
303 if not self._exclude(file):
305 if not self._exclude(file):
304 self.changes[rev].mod_files.append(file)
306 self.changes[rev].mod_files.append(file)
305 # Modified link
307 # Modified link
306 elif l.startswith('M->'):
308 elif l.startswith('M->'):
307 file = self._stripbasepath(l[3:].strip())
309 file = self._stripbasepath(l[3:].strip())
308 if not self._exclude(file):
310 if not self._exclude(file):
309 self.changes[rev].mod_files.append(file)
311 self.changes[rev].mod_files.append(file)
310 # Modified file
312 # Modified file
311 elif l.startswith('M'):
313 elif l.startswith('M'):
312 file = self._stripbasepath(l[1:].strip())
314 file = self._stripbasepath(l[1:].strip())
313 if not self._exclude(file):
315 if not self._exclude(file):
314 self.changes[rev].mod_files.append(file)
316 self.changes[rev].mod_files.append(file)
315 # Renamed file (or link)
317 # Renamed file (or link)
316 elif l.startswith('=>'):
318 elif l.startswith('=>'):
317 files = l[2:].strip().split(' ')
319 files = l[2:].strip().split(' ')
318 if len(files) == 1:
320 if len(files) == 1:
319 files = l[2:].strip().split('\t')
321 files = l[2:].strip().split('\t')
320 src = self._stripbasepath(files[0])
322 src = self._stripbasepath(files[0])
321 dst = self._stripbasepath(files[1])
323 dst = self._stripbasepath(files[1])
322 if not self._exclude(src) and not self._exclude(dst):
324 if not self._exclude(src) and not self._exclude(dst):
323 self.changes[rev].ren_files[src] = dst
325 self.changes[rev].ren_files[src] = dst
324 # Conversion from file to link or from link to file (modified)
326 # Conversion from file to link or from link to file (modified)
325 elif l.startswith('ch'):
327 elif l.startswith('ch'):
326 file = self._stripbasepath(l[2:].strip())
328 file = self._stripbasepath(l[2:].strip())
327 if not self._exclude(file):
329 if not self._exclude(file):
328 self.changes[rev].mod_files.append(file)
330 self.changes[rev].mod_files.append(file)
329 # Renamed directory
331 # Renamed directory
330 elif l.startswith('/>'):
332 elif l.startswith('/>'):
331 dirs = l[2:].strip().split(' ')
333 dirs = l[2:].strip().split(' ')
332 if len(dirs) == 1:
334 if len(dirs) == 1:
333 dirs = l[2:].strip().split('\t')
335 dirs = l[2:].strip().split('\t')
334 src = self._stripbasepath(dirs[0])
336 src = self._stripbasepath(dirs[0])
335 dst = self._stripbasepath(dirs[1])
337 dst = self._stripbasepath(dirs[1])
336 if not self._exclude(src) and not self._exclude(dst):
338 if not self._exclude(src) and not self._exclude(dst):
337 self.changes[rev].ren_dirs[src] = dst
339 self.changes[rev].ren_dirs[src] = dst
@@ -1,470 +1,476 b''
1 # hg.py - hg backend for convert extension
1 # hg.py - hg backend for convert extension
2 #
2 #
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
3 # Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 # Notes for hg->hg conversion:
8 # Notes for hg->hg conversion:
9 #
9 #
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
10 # * Old versions of Mercurial didn't trim the whitespace from the ends
11 # of commit messages, but new versions do. Changesets created by
11 # of commit messages, but new versions do. Changesets created by
12 # those older versions, then converted, may thus have different
12 # those older versions, then converted, may thus have different
13 # hashes for changesets that are otherwise identical.
13 # hashes for changesets that are otherwise identical.
14 #
14 #
15 # * Using "--config convert.hg.saverev=true" will make the source
15 # * Using "--config convert.hg.saverev=true" will make the source
16 # identifier to be stored in the converted revision. This will cause
16 # identifier to be stored in the converted revision. This will cause
17 # the converted revision to have a different identity than the
17 # the converted revision to have a different identity than the
18 # source.
18 # source.
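#
# Example (illustrative; SOURCE and DEST are placeholders):
#
#   hg convert --config convert.hg.saverev=true SOURCE DEST
#
# stores the source changeset identifier in each converted revision,
# at the cost of giving the converted changesets different hashes.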
19
19
20
20
21 import os, time, cStringIO
21 import os, time, cStringIO
22 from mercurial.i18n import _
22 from mercurial.i18n import _
23 from mercurial.node import bin, hex, nullid
23 from mercurial.node import bin, hex, nullid
24 from mercurial import hg, util, context, bookmarks, error, scmutil
24 from mercurial import hg, util, context, bookmarks, error, scmutil
25
25
26 from common import NoRepo, commit, converter_source, converter_sink
26 from common import NoRepo, commit, converter_source, converter_sink
27
27
28 import re
28 import re
29 sha1re = re.compile(r'\b[0-9a-f]{6,40}\b')
29 sha1re = re.compile(r'\b[0-9a-f]{6,40}\b')
30
30
31 class mercurial_sink(converter_sink):
31 class mercurial_sink(converter_sink):
32 def __init__(self, ui, path):
32 def __init__(self, ui, path):
33 converter_sink.__init__(self, ui, path)
33 converter_sink.__init__(self, ui, path)
34 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
34 self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
35 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
35 self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
36 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
36 self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
37 self.lastbranch = None
37 self.lastbranch = None
38 if os.path.isdir(path) and len(os.listdir(path)) > 0:
38 if os.path.isdir(path) and len(os.listdir(path)) > 0:
39 try:
39 try:
40 self.repo = hg.repository(self.ui, path)
40 self.repo = hg.repository(self.ui, path)
41 if not self.repo.local():
41 if not self.repo.local():
42 raise NoRepo(_('%s is not a local Mercurial repository')
42 raise NoRepo(_('%s is not a local Mercurial repository')
43 % path)
43 % path)
44 except error.RepoError, err:
44 except error.RepoError, err:
45 ui.traceback()
45 ui.traceback()
46 raise NoRepo(err.args[0])
46 raise NoRepo(err.args[0])
47 else:
47 else:
48 try:
48 try:
49 ui.status(_('initializing destination %s repository\n') % path)
49 ui.status(_('initializing destination %s repository\n') % path)
50 self.repo = hg.repository(self.ui, path, create=True)
50 self.repo = hg.repository(self.ui, path, create=True)
51 if not self.repo.local():
51 if not self.repo.local():
52 raise NoRepo(_('%s is not a local Mercurial repository')
52 raise NoRepo(_('%s is not a local Mercurial repository')
53 % path)
53 % path)
54 self.created.append(path)
54 self.created.append(path)
55 except error.RepoError:
55 except error.RepoError:
56 ui.traceback()
56 ui.traceback()
57 raise NoRepo(_("could not create hg repository %s as sink")
57 raise NoRepo(_("could not create hg repository %s as sink")
58 % path)
58 % path)
59 self.lock = None
59 self.lock = None
60 self.wlock = None
60 self.wlock = None
61 self.filemapmode = False
61 self.filemapmode = False
62
62
63 def before(self):
63 def before(self):
64 self.ui.debug('run hg sink pre-conversion action\n')
64 self.ui.debug('run hg sink pre-conversion action\n')
65 self.wlock = self.repo.wlock()
65 self.wlock = self.repo.wlock()
66 self.lock = self.repo.lock()
66 self.lock = self.repo.lock()
67
67
68 def after(self):
68 def after(self):
69 self.ui.debug('run hg sink post-conversion action\n')
69 self.ui.debug('run hg sink post-conversion action\n')
70 if self.lock:
70 if self.lock:
71 self.lock.release()
71 self.lock.release()
72 if self.wlock:
72 if self.wlock:
73 self.wlock.release()
73 self.wlock.release()
74
74
75 def revmapfile(self):
75 def revmapfile(self):
76 return self.repo.join("shamap")
76 return self.repo.join("shamap")
77
77
78 def authorfile(self):
78 def authorfile(self):
79 return self.repo.join("authormap")
79 return self.repo.join("authormap")
80
80
81 def setbranch(self, branch, pbranches):
81 def setbranch(self, branch, pbranches):
82 if not self.clonebranches:
82 if not self.clonebranches:
83 return
83 return
84
84
85 setbranch = (branch != self.lastbranch)
85 setbranch = (branch != self.lastbranch)
86 self.lastbranch = branch
86 self.lastbranch = branch
87 if not branch:
87 if not branch:
88 branch = 'default'
88 branch = 'default'
89 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
89 pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
90 pbranch = pbranches and pbranches[0][1] or 'default'
90 pbranch = pbranches and pbranches[0][1] or 'default'
91
91
92 branchpath = os.path.join(self.path, branch)
92 branchpath = os.path.join(self.path, branch)
93 if setbranch:
93 if setbranch:
94 self.after()
94 self.after()
95 try:
95 try:
96 self.repo = hg.repository(self.ui, branchpath)
96 self.repo = hg.repository(self.ui, branchpath)
97 except Exception:
97 except Exception:
98 self.repo = hg.repository(self.ui, branchpath, create=True)
98 self.repo = hg.repository(self.ui, branchpath, create=True)
99 self.before()
99 self.before()
100
100
101 # pbranches may bring revisions from other branches (merge parents)
101 # pbranches may bring revisions from other branches (merge parents)
102 # Make sure we have them, or pull them.
102 # Make sure we have them, or pull them.
103 missings = {}
103 missings = {}
104 for b in pbranches:
104 for b in pbranches:
105 try:
105 try:
106 self.repo.lookup(b[0])
106 self.repo.lookup(b[0])
107 except Exception:
107 except Exception:
108 missings.setdefault(b[1], []).append(b[0])
108 missings.setdefault(b[1], []).append(b[0])
109
109
110 if missings:
110 if missings:
111 self.after()
111 self.after()
112 for pbranch, heads in sorted(missings.iteritems()):
112 for pbranch, heads in sorted(missings.iteritems()):
113 pbranchpath = os.path.join(self.path, pbranch)
113 pbranchpath = os.path.join(self.path, pbranch)
114 prepo = hg.peer(self.ui, {}, pbranchpath)
114 prepo = hg.peer(self.ui, {}, pbranchpath)
115 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
115 self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
116 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
116 self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
117 self.before()
117 self.before()
118
118
119 def _rewritetags(self, source, revmap, data):
119 def _rewritetags(self, source, revmap, data):
120 fp = cStringIO.StringIO()
120 fp = cStringIO.StringIO()
121 for line in data.splitlines():
121 for line in data.splitlines():
122 s = line.split(' ', 1)
122 s = line.split(' ', 1)
123 if len(s) != 2:
123 if len(s) != 2:
124 continue
124 continue
125 revid = revmap.get(source.lookuprev(s[0]))
125 revid = revmap.get(source.lookuprev(s[0]))
126 if not revid:
126 if not revid:
127 continue
127 continue
128 fp.write('%s %s\n' % (revid, s[1]))
128 fp.write('%s %s\n' % (revid, s[1]))
129 return fp.getvalue()
129 return fp.getvalue()
130
130
131 def putcommit(self, files, copies, parents, commit, source, revmap):
131 def putcommit(self, files, copies, parents, commit, source, revmap, full):
132
133 files = dict(files)
132 files = dict(files)
134 def getfilectx(repo, memctx, f):
133 def getfilectx(repo, memctx, f):
135 v = files[f]
134 try:
135 v = files[f]
136 except KeyError:
137 return None
136 data, mode = source.getfile(f, v)
138 data, mode = source.getfile(f, v)
137 if data is None:
139 if data is None:
138 return None
140 return None
139 if f == '.hgtags':
141 if f == '.hgtags':
140 data = self._rewritetags(source, revmap, data)
142 data = self._rewritetags(source, revmap, data)
141 return context.memfilectx(self.repo, f, data, 'l' in mode,
143 return context.memfilectx(self.repo, f, data, 'l' in mode,
142 'x' in mode, copies.get(f))
144 'x' in mode, copies.get(f))
143
145
144 pl = []
146 pl = []
145 for p in parents:
147 for p in parents:
146 if p not in pl:
148 if p not in pl:
147 pl.append(p)
149 pl.append(p)
148 parents = pl
150 parents = pl
149 nparents = len(parents)
151 nparents = len(parents)
150 if self.filemapmode and nparents == 1:
152 if self.filemapmode and nparents == 1:
151 m1node = self.repo.changelog.read(bin(parents[0]))[0]
153 m1node = self.repo.changelog.read(bin(parents[0]))[0]
152 parent = parents[0]
154 parent = parents[0]
153
155
154 if len(parents) < 2:
156 if len(parents) < 2:
155 parents.append(nullid)
157 parents.append(nullid)
156 if len(parents) < 2:
158 if len(parents) < 2:
157 parents.append(nullid)
159 parents.append(nullid)
158 p2 = parents.pop(0)
160 p2 = parents.pop(0)
159
161
160 text = commit.desc
162 text = commit.desc
161
163
162 sha1s = re.findall(sha1re, text)
164 sha1s = re.findall(sha1re, text)
163 for sha1 in sha1s:
165 for sha1 in sha1s:
164 oldrev = source.lookuprev(sha1)
166 oldrev = source.lookuprev(sha1)
165 newrev = revmap.get(oldrev)
167 newrev = revmap.get(oldrev)
166 if newrev is not None:
168 if newrev is not None:
167 text = text.replace(sha1, newrev[:len(sha1)])
169 text = text.replace(sha1, newrev[:len(sha1)])
168
170
169 extra = commit.extra.copy()
171 extra = commit.extra.copy()
170
172
171 for label in ('source', 'transplant_source', 'rebase_source'):
173 for label in ('source', 'transplant_source', 'rebase_source'):
172 node = extra.get(label)
174 node = extra.get(label)
173
175
174 if node is None:
176 if node is None:
175 continue
177 continue
176
178
177 # Only transplant stores its reference in binary
179 # Only transplant stores its reference in binary
178 if label == 'transplant_source':
180 if label == 'transplant_source':
179 node = hex(node)
181 node = hex(node)
180
182
181 newrev = revmap.get(node)
183 newrev = revmap.get(node)
182 if newrev is not None:
184 if newrev is not None:
183 if label == 'transplant_source':
185 if label == 'transplant_source':
184 newrev = bin(newrev)
186 newrev = bin(newrev)
185
187
186 extra[label] = newrev
188 extra[label] = newrev
187
189
188 if self.branchnames and commit.branch:
190 if self.branchnames and commit.branch:
189 extra['branch'] = commit.branch
191 extra['branch'] = commit.branch
190 if commit.rev:
192 if commit.rev:
191 extra['convert_revision'] = commit.rev
193 extra['convert_revision'] = commit.rev
192
194
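# Illustrative note, not in the original source: for an octopus merge
# (more than two parents) this loop folds the extra parents in one at a
# time, committing intermediate two-parent merges described as
# "(octopus merge fixup)" whose first parent is the previous tip.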
193 while parents:
195 while parents:
194 p1 = p2
196 p1 = p2
195 p2 = parents.pop(0)
197 p2 = parents.pop(0)
196 ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
198 fileset = set(files)
199 if full:
200 fileset.update(self.repo[p1], self.repo[p2])
201 ctx = context.memctx(self.repo, (p1, p2), text, fileset,
197 getfilectx, commit.author, commit.date, extra)
202 getfilectx, commit.author, commit.date, extra)
198 self.repo.commitctx(ctx)
203 self.repo.commitctx(ctx)
199 text = "(octopus merge fixup)\n"
204 text = "(octopus merge fixup)\n"
200 p2 = hex(self.repo.changelog.tip())
205 p2 = hex(self.repo.changelog.tip())
201
206
202 if self.filemapmode and nparents == 1:
207 if self.filemapmode and nparents == 1:
203 man = self.repo.manifest
208 man = self.repo.manifest
204 mnode = self.repo.changelog.read(bin(p2))[0]
209 mnode = self.repo.changelog.read(bin(p2))[0]
205 closed = 'close' in commit.extra
210 closed = 'close' in commit.extra
206 if not closed and not man.cmp(m1node, man.revision(mnode)):
211 if not closed and not man.cmp(m1node, man.revision(mnode)):
207 self.ui.status(_("filtering out empty revision\n"))
212 self.ui.status(_("filtering out empty revision\n"))
208 self.repo.rollback(force=True)
213 self.repo.rollback(force=True)
209 return parent
214 return parent
210 return p2
215 return p2
211
216
212 def puttags(self, tags):
217 def puttags(self, tags):
213 try:
218 try:
214 parentctx = self.repo[self.tagsbranch]
219 parentctx = self.repo[self.tagsbranch]
215 tagparent = parentctx.node()
220 tagparent = parentctx.node()
216 except error.RepoError:
221 except error.RepoError:
217 parentctx = None
222 parentctx = None
218 tagparent = nullid
223 tagparent = nullid
219
224
220 oldlines = set()
225 oldlines = set()
221 for branch, heads in self.repo.branchmap().iteritems():
226 for branch, heads in self.repo.branchmap().iteritems():
222 for h in heads:
227 for h in heads:
223 if '.hgtags' in self.repo[h]:
228 if '.hgtags' in self.repo[h]:
224 oldlines.update(
229 oldlines.update(
225 set(self.repo[h]['.hgtags'].data().splitlines(True)))
230 set(self.repo[h]['.hgtags'].data().splitlines(True)))
226 oldlines = sorted(list(oldlines))
231 oldlines = sorted(list(oldlines))
227
232
228 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
233 newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
229 if newlines == oldlines:
234 if newlines == oldlines:
230 return None, None
235 return None, None
231
236
232 # if the old and new tags match, then there is nothing to update
237 # if the old and new tags match, then there is nothing to update
233 oldtags = set()
238 oldtags = set()
234 newtags = set()
239 newtags = set()
235 for line in oldlines:
240 for line in oldlines:
236 s = line.strip().split(' ', 1)
241 s = line.strip().split(' ', 1)
237 if len(s) != 2:
242 if len(s) != 2:
238 continue
243 continue
239 oldtags.add(s[1])
244 oldtags.add(s[1])
240 for line in newlines:
245 for line in newlines:
241 s = line.strip().split(' ', 1)
246 s = line.strip().split(' ', 1)
242 if len(s) != 2:
247 if len(s) != 2:
243 continue
248 continue
244 if s[1] not in oldtags:
249 if s[1] not in oldtags:
245 newtags.add(s[1].strip())
250 newtags.add(s[1].strip())
246
251
247 if not newtags:
252 if not newtags:
248 return None, None
253 return None, None
249
254
250 data = "".join(newlines)
255 data = "".join(newlines)
251 def getfilectx(repo, memctx, f):
256 def getfilectx(repo, memctx, f):
252 return context.memfilectx(repo, f, data, False, False, None)
257 return context.memfilectx(repo, f, data, False, False, None)
253
258
254 self.ui.status(_("updating tags\n"))
259 self.ui.status(_("updating tags\n"))
255 date = "%s 0" % int(time.mktime(time.gmtime()))
260 date = "%s 0" % int(time.mktime(time.gmtime()))
256 extra = {'branch': self.tagsbranch}
261 extra = {'branch': self.tagsbranch}
257 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
262 ctx = context.memctx(self.repo, (tagparent, None), "update tags",
258 [".hgtags"], getfilectx, "convert-repo", date,
263 [".hgtags"], getfilectx, "convert-repo", date,
259 extra)
264 extra)
260 self.repo.commitctx(ctx)
265 self.repo.commitctx(ctx)
261 return hex(self.repo.changelog.tip()), hex(tagparent)
266 return hex(self.repo.changelog.tip()), hex(tagparent)
262
267
263 def setfilemapmode(self, active):
268 def setfilemapmode(self, active):
264 self.filemapmode = active
269 self.filemapmode = active
265
270
266 def putbookmarks(self, updatedbookmark):
271 def putbookmarks(self, updatedbookmark):
267 if not len(updatedbookmark):
272 if not len(updatedbookmark):
268 return
273 return
269
274
270 self.ui.status(_("updating bookmarks\n"))
275 self.ui.status(_("updating bookmarks\n"))
271 destmarks = self.repo._bookmarks
276 destmarks = self.repo._bookmarks
272 for bookmark in updatedbookmark:
277 for bookmark in updatedbookmark:
273 destmarks[bookmark] = bin(updatedbookmark[bookmark])
278 destmarks[bookmark] = bin(updatedbookmark[bookmark])
274 destmarks.write()
279 destmarks.write()
275
280
276 def hascommitfrommap(self, rev):
281 def hascommitfrommap(self, rev):
277 # the exact semantics of clonebranches is unclear so we can't say no
282 # the exact semantics of clonebranches is unclear so we can't say no
278 return rev in self.repo or self.clonebranches
283 return rev in self.repo or self.clonebranches
279
284
280 def hascommitforsplicemap(self, rev):
285 def hascommitforsplicemap(self, rev):
281 if rev not in self.repo and self.clonebranches:
286 if rev not in self.repo and self.clonebranches:
282 raise util.Abort(_('revision %s not found in destination '
287 raise util.Abort(_('revision %s not found in destination '
283 'repository (lookups with clonebranches=true '
288 'repository (lookups with clonebranches=true '
284 'are not implemented)') % rev)
289 'are not implemented)') % rev)
285 return rev in self.repo
290 return rev in self.repo
286
291
287 class mercurial_source(converter_source):
292 class mercurial_source(converter_source):
288 def __init__(self, ui, path, rev=None):
293 def __init__(self, ui, path, rev=None):
289 converter_source.__init__(self, ui, path, rev)
294 converter_source.__init__(self, ui, path, rev)
290 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
295 self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
291 self.ignored = set()
296 self.ignored = set()
292 self.saverev = ui.configbool('convert', 'hg.saverev', False)
297 self.saverev = ui.configbool('convert', 'hg.saverev', False)
293 try:
298 try:
294 self.repo = hg.repository(self.ui, path)
299 self.repo = hg.repository(self.ui, path)
295 # try to provoke an exception if this isn't really a hg
300 # try to provoke an exception if this isn't really a hg
296 # repo, but some other bogus compatible-looking url
301 # repo, but some other bogus compatible-looking url
297 if not self.repo.local():
302 if not self.repo.local():
298 raise error.RepoError
303 raise error.RepoError
299 except error.RepoError:
304 except error.RepoError:
300 ui.traceback()
305 ui.traceback()
301 raise NoRepo(_("%s is not a local Mercurial repository") % path)
306 raise NoRepo(_("%s is not a local Mercurial repository") % path)
302 self.lastrev = None
307 self.lastrev = None
303 self.lastctx = None
308 self.lastctx = None
304 self._changescache = None, None
309 self._changescache = None, None
305 self.convertfp = None
310 self.convertfp = None
306 # Restrict converted revisions to startrev descendants
311 # Restrict converted revisions to startrev descendants
307 startnode = ui.config('convert', 'hg.startrev')
312 startnode = ui.config('convert', 'hg.startrev')
308 hgrevs = ui.config('convert', 'hg.revs')
313 hgrevs = ui.config('convert', 'hg.revs')
309 if hgrevs is None:
314 if hgrevs is None:
310 if startnode is not None:
315 if startnode is not None:
311 try:
316 try:
312 startnode = self.repo.lookup(startnode)
317 startnode = self.repo.lookup(startnode)
313 except error.RepoError:
318 except error.RepoError:
314 raise util.Abort(_('%s is not a valid start revision')
319 raise util.Abort(_('%s is not a valid start revision')
315 % startnode)
320 % startnode)
316 startrev = self.repo.changelog.rev(startnode)
321 startrev = self.repo.changelog.rev(startnode)
317 children = {startnode: 1}
322 children = {startnode: 1}
318 for r in self.repo.changelog.descendants([startrev]):
323 for r in self.repo.changelog.descendants([startrev]):
319 children[self.repo.changelog.node(r)] = 1
324 children[self.repo.changelog.node(r)] = 1
320 self.keep = children.__contains__
325 self.keep = children.__contains__
321 else:
326 else:
322 self.keep = util.always
327 self.keep = util.always
323 if rev:
328 if rev:
324 self._heads = [self.repo[rev].node()]
329 self._heads = [self.repo[rev].node()]
325 else:
330 else:
326 self._heads = self.repo.heads()
331 self._heads = self.repo.heads()
327 else:
332 else:
328 if rev or startnode is not None:
333 if rev or startnode is not None:
329 raise util.Abort(_('hg.revs cannot be combined with '
334 raise util.Abort(_('hg.revs cannot be combined with '
330 'hg.startrev or --rev'))
335 'hg.startrev or --rev'))
331 nodes = set()
336 nodes = set()
332 parents = set()
337 parents = set()
333 for r in scmutil.revrange(self.repo, [hgrevs]):
338 for r in scmutil.revrange(self.repo, [hgrevs]):
334 ctx = self.repo[r]
339 ctx = self.repo[r]
335 nodes.add(ctx.node())
340 nodes.add(ctx.node())
336 parents.update(p.node() for p in ctx.parents())
341 parents.update(p.node() for p in ctx.parents())
337 self.keep = nodes.__contains__
342 self.keep = nodes.__contains__
338 self._heads = nodes - parents
343 self._heads = nodes - parents
339
344
340 def changectx(self, rev):
345 def changectx(self, rev):
341 if self.lastrev != rev:
346 if self.lastrev != rev:
342 self.lastctx = self.repo[rev]
347 self.lastctx = self.repo[rev]
343 self.lastrev = rev
348 self.lastrev = rev
344 return self.lastctx
349 return self.lastctx
345
350
346 def parents(self, ctx):
351 def parents(self, ctx):
347 return [p for p in ctx.parents() if p and self.keep(p.node())]
352 return [p for p in ctx.parents() if p and self.keep(p.node())]
348
353
349 def getheads(self):
354 def getheads(self):
350 return [hex(h) for h in self._heads if self.keep(h)]
355 return [hex(h) for h in self._heads if self.keep(h)]
351
356
352 def getfile(self, name, rev):
357 def getfile(self, name, rev):
353 try:
358 try:
354 fctx = self.changectx(rev)[name]
359 fctx = self.changectx(rev)[name]
355 return fctx.data(), fctx.flags()
360 return fctx.data(), fctx.flags()
356 except error.LookupError:
361 except error.LookupError:
357 return None, None
362 return None, None
358
363
359 def getchanges(self, rev):
364 def getchanges(self, rev, full):
360 ctx = self.changectx(rev)
365 ctx = self.changectx(rev)
361 parents = self.parents(ctx)
366 parents = self.parents(ctx)
362 if not parents:
367 if full or not parents:
363 files = copyfiles = ctx.manifest()
368 files = copyfiles = ctx.manifest()
364 else:
369 if parents:
365 if self._changescache[0] == rev:
370 if self._changescache[0] == rev:
366 m, a, r = self._changescache[1]
371 m, a, r = self._changescache[1]
367 else:
372 else:
368 m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
373 m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
369 files = m + a + r
374 if not full:
375 files = m + a + r
370 copyfiles = m + a
376 copyfiles = m + a
371 # getcopies() is also run for roots and before filtering so missing
377 # getcopies() is also run for roots and before filtering so missing
372 # revlogs are detected early
378 # revlogs are detected early
373 copies = self.getcopies(ctx, parents, copyfiles)
379 copies = self.getcopies(ctx, parents, copyfiles)
374 changes = [(f, rev) for f in files if f not in self.ignored]
380 changes = [(f, rev) for f in files if f not in self.ignored]
375 changes.sort()
381 changes.sort()
376 return changes, copies
382 return changes, copies
377
383
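For orientation, here is a minimal sketch of how the (changes, copies) pair returned by getchanges() above might be consumed. It is not the convert engine's real driver; hgsource, headrev and the printed labels are illustrative only.

# With full=False only the files touched in the revision come back; with
# full=True the whole manifest does, which is how --full lets filemap
# changes be applied to files that were already converted.
def dump_revision(hgsource, headrev, full=False):
    changes, copies = hgsource.getchanges(headrev, full)
    for path, srcrev in changes:                  # sorted [(path, rev)] pairs
        data, flags = hgsource.getfile(path, srcrev)
        if data is None:                          # getfile signals a removal
            print('remove %s' % path)
        else:
            print('keep %s (%d bytes, flags %r)' % (path, len(data), flags))
    return copies                                 # {destination: source} copy map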
378 def getcopies(self, ctx, parents, files):
384 def getcopies(self, ctx, parents, files):
379 copies = {}
385 copies = {}
380 for name in files:
386 for name in files:
381 if name in self.ignored:
387 if name in self.ignored:
382 continue
388 continue
383 try:
389 try:
384 copysource, _copynode = ctx.filectx(name).renamed()
390 copysource, _copynode = ctx.filectx(name).renamed()
385 if copysource in self.ignored:
391 if copysource in self.ignored:
386 continue
392 continue
387 # Ignore copy sources not in parent revisions
393 # Ignore copy sources not in parent revisions
388 found = False
394 found = False
389 for p in parents:
395 for p in parents:
390 if copysource in p:
396 if copysource in p:
391 found = True
397 found = True
392 break
398 break
393 if not found:
399 if not found:
394 continue
400 continue
395 copies[name] = copysource
401 copies[name] = copysource
396 except TypeError:
402 except TypeError:
397 pass
403 pass
398 except error.LookupError, e:
404 except error.LookupError, e:
399 if not self.ignoreerrors:
405 if not self.ignoreerrors:
400 raise
406 raise
401 self.ignored.add(name)
407 self.ignored.add(name)
402 self.ui.warn(_('ignoring: %s\n') % e)
408 self.ui.warn(_('ignoring: %s\n') % e)
403 return copies
409 return copies
404
410
405 def getcommit(self, rev):
411 def getcommit(self, rev):
406 ctx = self.changectx(rev)
412 ctx = self.changectx(rev)
407 parents = [p.hex() for p in self.parents(ctx)]
413 parents = [p.hex() for p in self.parents(ctx)]
408 if self.saverev:
414 if self.saverev:
409 crev = rev
415 crev = rev
410 else:
416 else:
411 crev = None
417 crev = None
412 return commit(author=ctx.user(),
418 return commit(author=ctx.user(),
413 date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
419 date=util.datestr(ctx.date(), '%Y-%m-%d %H:%M:%S %1%2'),
414 desc=ctx.description(), rev=crev, parents=parents,
420 desc=ctx.description(), rev=crev, parents=parents,
415 branch=ctx.branch(), extra=ctx.extra(),
421 branch=ctx.branch(), extra=ctx.extra(),
416 sortkey=ctx.rev())
422 sortkey=ctx.rev())
417
423
418 def gettags(self):
424 def gettags(self):
419 # This will get written to .hgtags, filter non global tags out.
425 # This will get written to .hgtags, filter non global tags out.
420 tags = [t for t in self.repo.tagslist()
426 tags = [t for t in self.repo.tagslist()
421 if self.repo.tagtype(t[0]) == 'global']
427 if self.repo.tagtype(t[0]) == 'global']
422 return dict([(name, hex(node)) for name, node in tags
428 return dict([(name, hex(node)) for name, node in tags
423 if self.keep(node)])
429 if self.keep(node)])
424
430
425 def getchangedfiles(self, rev, i):
431 def getchangedfiles(self, rev, i):
426 ctx = self.changectx(rev)
432 ctx = self.changectx(rev)
427 parents = self.parents(ctx)
433 parents = self.parents(ctx)
428 if not parents and i is None:
434 if not parents and i is None:
429 i = 0
435 i = 0
430 changes = [], ctx.manifest().keys(), []
436 changes = [], ctx.manifest().keys(), []
431 else:
437 else:
432 i = i or 0
438 i = i or 0
433 changes = self.repo.status(parents[i].node(), ctx.node())[:3]
439 changes = self.repo.status(parents[i].node(), ctx.node())[:3]
434 changes = [[f for f in l if f not in self.ignored] for l in changes]
440 changes = [[f for f in l if f not in self.ignored] for l in changes]
435
441
436 if i == 0:
442 if i == 0:
437 self._changescache = (rev, changes)
443 self._changescache = (rev, changes)
438
444
439 return changes[0] + changes[1] + changes[2]
445 return changes[0] + changes[1] + changes[2]
440
446
441 def converted(self, rev, destrev):
447 def converted(self, rev, destrev):
442 if self.convertfp is None:
448 if self.convertfp is None:
443 self.convertfp = open(self.repo.join('shamap'), 'a')
449 self.convertfp = open(self.repo.join('shamap'), 'a')
444 self.convertfp.write('%s %s\n' % (destrev, rev))
450 self.convertfp.write('%s %s\n' % (destrev, rev))
445 self.convertfp.flush()
451 self.convertfp.flush()
446
452
447 def before(self):
453 def before(self):
448 self.ui.debug('run hg source pre-conversion action\n')
454 self.ui.debug('run hg source pre-conversion action\n')
449
455
450 def after(self):
456 def after(self):
451 self.ui.debug('run hg source post-conversion action\n')
457 self.ui.debug('run hg source post-conversion action\n')
452
458
453 def hasnativeorder(self):
459 def hasnativeorder(self):
454 return True
460 return True
455
461
456 def hasnativeclose(self):
462 def hasnativeclose(self):
457 return True
463 return True
458
464
459 def lookuprev(self, rev):
465 def lookuprev(self, rev):
460 try:
466 try:
461 return hex(self.repo.lookup(rev))
467 return hex(self.repo.lookup(rev))
462 except error.RepoError:
468 except error.RepoError:
463 return None
469 return None
464
470
465 def getbookmarks(self):
471 def getbookmarks(self):
466 return bookmarks.listbookmarks(self.repo)
472 return bookmarks.listbookmarks(self.repo)
467
473
468 def checkrevformat(self, revstr, mapname='splicemap'):
474 def checkrevformat(self, revstr, mapname='splicemap'):
469 """ Mercurial, revision string is a 40 byte hex """
475 """ Mercurial, revision string is a 40 byte hex """
470 self.checkhexformat(revstr, mapname)
476 self.checkhexformat(revstr, mapname)

@@ -1,359 +1,361 b''
1 # monotone.py - monotone support for the convert extension
1 # monotone.py - monotone support for the convert extension
2 #
2 #
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
3 # Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
4 # others
4 # others
5 #
5 #
6 # This software may be used and distributed according to the terms of the
6 # This software may be used and distributed according to the terms of the
7 # GNU General Public License version 2 or any later version.
7 # GNU General Public License version 2 or any later version.
8
8
9 import os, re
9 import os, re
10 from mercurial import util
10 from mercurial import util
11 from common import NoRepo, commit, converter_source, checktool
11 from common import NoRepo, commit, converter_source, checktool
12 from common import commandline
12 from common import commandline
13 from mercurial.i18n import _
13 from mercurial.i18n import _
14
14
15 class monotone_source(converter_source, commandline):
15 class monotone_source(converter_source, commandline):
16 def __init__(self, ui, path=None, rev=None):
16 def __init__(self, ui, path=None, rev=None):
17 converter_source.__init__(self, ui, path, rev)
17 converter_source.__init__(self, ui, path, rev)
18 commandline.__init__(self, ui, 'mtn')
18 commandline.__init__(self, ui, 'mtn')
19
19
20 self.ui = ui
20 self.ui = ui
21 self.path = path
21 self.path = path
22 self.automatestdio = False
22 self.automatestdio = False
23 self.rev = rev
23 self.rev = rev
24
24
25 norepo = NoRepo(_("%s does not look like a monotone repository")
25 norepo = NoRepo(_("%s does not look like a monotone repository")
26 % path)
26 % path)
27 if not os.path.exists(os.path.join(path, '_MTN')):
27 if not os.path.exists(os.path.join(path, '_MTN')):
28 # Could be a monotone repository (SQLite db file)
28 # Could be a monotone repository (SQLite db file)
29 try:
29 try:
30 f = file(path, 'rb')
30 f = file(path, 'rb')
31 header = f.read(16)
31 header = f.read(16)
32 f.close()
32 f.close()
33 except IOError:
33 except IOError:
34 header = ''
34 header = ''
35 if header != 'SQLite format 3\x00':
35 if header != 'SQLite format 3\x00':
36 raise norepo
36 raise norepo
37
37
38 # regular expressions for parsing monotone output
38 # regular expressions for parsing monotone output
39 space = r'\s*'
39 space = r'\s*'
40 name = r'\s+"((?:\\"|[^"])*)"\s*'
40 name = r'\s+"((?:\\"|[^"])*)"\s*'
41 value = name
41 value = name
42 revision = r'\s+\[(\w+)\]\s*'
42 revision = r'\s+\[(\w+)\]\s*'
43 lines = r'(?:.|\n)+'
43 lines = r'(?:.|\n)+'
44
44
45 self.dir_re = re.compile(space + "dir" + name)
45 self.dir_re = re.compile(space + "dir" + name)
46 self.file_re = re.compile(space + "file" + name +
46 self.file_re = re.compile(space + "file" + name +
47 "content" + revision)
47 "content" + revision)
48 self.add_file_re = re.compile(space + "add_file" + name +
48 self.add_file_re = re.compile(space + "add_file" + name +
49 "content" + revision)
49 "content" + revision)
50 self.patch_re = re.compile(space + "patch" + name +
50 self.patch_re = re.compile(space + "patch" + name +
51 "from" + revision + "to" + revision)
51 "from" + revision + "to" + revision)
52 self.rename_re = re.compile(space + "rename" + name + "to" + name)
52 self.rename_re = re.compile(space + "rename" + name + "to" + name)
53 self.delete_re = re.compile(space + "delete" + name)
53 self.delete_re = re.compile(space + "delete" + name)
54 self.tag_re = re.compile(space + "tag" + name + "revision" +
54 self.tag_re = re.compile(space + "tag" + name + "revision" +
55 revision)
55 revision)
56 self.cert_re = re.compile(lines + space + "name" + name +
56 self.cert_re = re.compile(lines + space + "name" + name +
57 "value" + value)
57 "value" + value)
58
58
59 attr = space + "file" + lines + space + "attr" + space
59 attr = space + "file" + lines + space + "attr" + space
60 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
60 self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
61 space + '"true"')
61 space + '"true"')
62
62
63 # cached data
63 # cached data
64 self.manifest_rev = None
64 self.manifest_rev = None
65 self.manifest = None
65 self.manifest = None
66 self.files = None
66 self.files = None
67 self.dirs = None
67 self.dirs = None
68
68
69 checktool('mtn', abort=False)
69 checktool('mtn', abort=False)
70
70
71 def mtnrun(self, *args, **kwargs):
71 def mtnrun(self, *args, **kwargs):
72 if self.automatestdio:
72 if self.automatestdio:
73 return self.mtnrunstdio(*args, **kwargs)
73 return self.mtnrunstdio(*args, **kwargs)
74 else:
74 else:
75 return self.mtnrunsingle(*args, **kwargs)
75 return self.mtnrunsingle(*args, **kwargs)
76
76
77 def mtnrunsingle(self, *args, **kwargs):
77 def mtnrunsingle(self, *args, **kwargs):
78 kwargs['d'] = self.path
78 kwargs['d'] = self.path
79 return self.run0('automate', *args, **kwargs)
79 return self.run0('automate', *args, **kwargs)
80
80
81 def mtnrunstdio(self, *args, **kwargs):
81 def mtnrunstdio(self, *args, **kwargs):
82 # Prepare the command in automate stdio format
82 # Prepare the command in automate stdio format
83 command = []
83 command = []
84 for k, v in kwargs.iteritems():
84 for k, v in kwargs.iteritems():
85 command.append("%s:%s" % (len(k), k))
85 command.append("%s:%s" % (len(k), k))
86 if v:
86 if v:
87 command.append("%s:%s" % (len(v), v))
87 command.append("%s:%s" % (len(v), v))
88 if command:
88 if command:
89 command.insert(0, 'o')
89 command.insert(0, 'o')
90 command.append('e')
90 command.append('e')
91
91
92 command.append('l')
92 command.append('l')
93 for arg in args:
93 for arg in args:
94 command += "%s:%s" % (len(arg), arg)
94 command += "%s:%s" % (len(arg), arg)
95 command.append('e')
95 command.append('e')
96 command = ''.join(command)
96 command = ''.join(command)
97
97
98 self.ui.debug("mtn: sending '%s'\n" % command)
98 self.ui.debug("mtn: sending '%s'\n" % command)
99 self.mtnwritefp.write(command)
99 self.mtnwritefp.write(command)
100 self.mtnwritefp.flush()
100 self.mtnwritefp.flush()
101
101
102 return self.mtnstdioreadcommandoutput(command)
102 return self.mtnstdioreadcommandoutput(command)
103
103
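Worked out by hand from the string building above, a call such as mtnrun("get_file_of", "README", r="abc123") (file name and revision id invented for the example) is serialized as one option block followed by one command block:

# 'o' <len>:<key> [<len>:<value>] 'e'   -- the options, then
# 'l' <len>:<arg> ...             'e'   -- the command and its arguments
option_block  = 'o' + '1:r' + '6:abc123' + 'e'
command_block = 'l' + '11:get_file_of' + '6:README' + 'e'
print(option_block + command_block)   # -> o1:r6:abc123el11:get_file_of6:READMEe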
104 def mtnstdioreadpacket(self):
104 def mtnstdioreadpacket(self):
105 read = None
105 read = None
106 commandnbr = ''
106 commandnbr = ''
107 while read != ':':
107 while read != ':':
108 read = self.mtnreadfp.read(1)
108 read = self.mtnreadfp.read(1)
109 if not read:
109 if not read:
110 raise util.Abort(_('bad mtn packet - no end of commandnbr'))
110 raise util.Abort(_('bad mtn packet - no end of commandnbr'))
111 commandnbr += read
111 commandnbr += read
112 commandnbr = commandnbr[:-1]
112 commandnbr = commandnbr[:-1]
113
113
114 stream = self.mtnreadfp.read(1)
114 stream = self.mtnreadfp.read(1)
115 if stream not in 'mewptl':
115 if stream not in 'mewptl':
116 raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
116 raise util.Abort(_('bad mtn packet - bad stream type %s') % stream)
117
117
118 read = self.mtnreadfp.read(1)
118 read = self.mtnreadfp.read(1)
119 if read != ':':
119 if read != ':':
120 raise util.Abort(_('bad mtn packet - no divider before size'))
120 raise util.Abort(_('bad mtn packet - no divider before size'))
121
121
122 read = None
122 read = None
123 lengthstr = ''
123 lengthstr = ''
124 while read != ':':
124 while read != ':':
125 read = self.mtnreadfp.read(1)
125 read = self.mtnreadfp.read(1)
126 if not read:
126 if not read:
127 raise util.Abort(_('bad mtn packet - no end of packet size'))
127 raise util.Abort(_('bad mtn packet - no end of packet size'))
128 lengthstr += read
128 lengthstr += read
129 try:
129 try:
130 length = long(lengthstr[:-1])
130 length = long(lengthstr[:-1])
131 except TypeError:
131 except TypeError:
132 raise util.Abort(_('bad mtn packet - bad packet size %s')
132 raise util.Abort(_('bad mtn packet - bad packet size %s')
133 % lengthstr)
133 % lengthstr)
134
134
135 read = self.mtnreadfp.read(length)
135 read = self.mtnreadfp.read(length)
136 if len(read) != length:
136 if len(read) != length:
137 raise util.Abort(_("bad mtn packet - unable to read full packet "
137 raise util.Abort(_("bad mtn packet - unable to read full packet "
138 "read %s of %s") % (len(read), length))
138 "read %s of %s") % (len(read), length))
139
139
140 return (commandnbr, stream, length, read)
140 return (commandnbr, stream, length, read)
141
141
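The packets read back above follow the layout <command number>:<stream>:<size>:<payload>. A small stand-alone re-reading of that format, mirroring mtnstdioreadpacket() on a fabricated packet (Python 2, like the surrounding code):

from StringIO import StringIO

def parse_packet(fp):
    # command number up to ':', one stream byte out of 'mewptl', a ':'
    # separator, the size up to ':', then exactly <size> payload bytes
    commandnbr = ''
    c = fp.read(1)
    while c != ':':
        commandnbr += c
        c = fp.read(1)
    stream = fp.read(1)
    fp.read(1)                  # skip the ':' before the size
    size = ''
    c = fp.read(1)
    while c != ':':
        size += c
        c = fp.read(1)
    return commandnbr, stream, int(size), fp.read(int(size))

print(parse_packet(StringIO('0:m:5:hello')))   # -> ('0', 'm', 5, 'hello')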
142 def mtnstdioreadcommandoutput(self, command):
142 def mtnstdioreadcommandoutput(self, command):
143 retval = []
143 retval = []
144 while True:
144 while True:
145 commandnbr, stream, length, output = self.mtnstdioreadpacket()
145 commandnbr, stream, length, output = self.mtnstdioreadpacket()
146 self.ui.debug('mtn: read packet %s:%s:%s\n' %
146 self.ui.debug('mtn: read packet %s:%s:%s\n' %
147 (commandnbr, stream, length))
147 (commandnbr, stream, length))
148
148
149 if stream == 'l':
149 if stream == 'l':
150 # End of command
150 # End of command
151 if output != '0':
151 if output != '0':
152 raise util.Abort(_("mtn command '%s' returned %s") %
152 raise util.Abort(_("mtn command '%s' returned %s") %
153 (command, output))
153 (command, output))
154 break
154 break
155 elif stream in 'ew':
155 elif stream in 'ew':
156 # Error, warning output
156 # Error, warning output
157 self.ui.warn(_('%s error:\n') % self.command)
157 self.ui.warn(_('%s error:\n') % self.command)
158 self.ui.warn(output)
158 self.ui.warn(output)
159 elif stream == 'p':
159 elif stream == 'p':
160 # Progress messages
160 # Progress messages
161 self.ui.debug('mtn: ' + output)
161 self.ui.debug('mtn: ' + output)
162 elif stream == 'm':
162 elif stream == 'm':
163 # Main stream - command output
163 # Main stream - command output
164 retval.append(output)
164 retval.append(output)
165
165
166 return ''.join(retval)
166 return ''.join(retval)
167
167
168 def mtnloadmanifest(self, rev):
168 def mtnloadmanifest(self, rev):
169 if self.manifest_rev == rev:
169 if self.manifest_rev == rev:
170 return
170 return
171 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
171 self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
172 self.manifest_rev = rev
172 self.manifest_rev = rev
173 self.files = {}
173 self.files = {}
174 self.dirs = {}
174 self.dirs = {}
175
175
176 for e in self.manifest:
176 for e in self.manifest:
177 m = self.file_re.match(e)
177 m = self.file_re.match(e)
178 if m:
178 if m:
179 attr = ""
179 attr = ""
180 name = m.group(1)
180 name = m.group(1)
181 node = m.group(2)
181 node = m.group(2)
182 if self.attr_execute_re.match(e):
182 if self.attr_execute_re.match(e):
183 attr += "x"
183 attr += "x"
184 self.files[name] = (node, attr)
184 self.files[name] = (node, attr)
185 m = self.dir_re.match(e)
185 m = self.dir_re.match(e)
186 if m:
186 if m:
187 self.dirs[m.group(1)] = True
187 self.dirs[m.group(1)] = True
188
188
189 def mtnisfile(self, name, rev):
189 def mtnisfile(self, name, rev):
190 # a non-file could be a directory or a deleted or renamed file
190 # a non-file could be a directory or a deleted or renamed file
191 self.mtnloadmanifest(rev)
191 self.mtnloadmanifest(rev)
192 return name in self.files
192 return name in self.files
193
193
194 def mtnisdir(self, name, rev):
194 def mtnisdir(self, name, rev):
195 self.mtnloadmanifest(rev)
195 self.mtnloadmanifest(rev)
196 return name in self.dirs
196 return name in self.dirs
197
197
198 def mtngetcerts(self, rev):
198 def mtngetcerts(self, rev):
199 certs = {"author":"<missing>", "date":"<missing>",
199 certs = {"author":"<missing>", "date":"<missing>",
200 "changelog":"<missing>", "branch":"<missing>"}
200 "changelog":"<missing>", "branch":"<missing>"}
201 certlist = self.mtnrun("certs", rev)
201 certlist = self.mtnrun("certs", rev)
202 # mtn < 0.45:
202 # mtn < 0.45:
203 # key "test@selenic.com"
203 # key "test@selenic.com"
204 # mtn >= 0.45:
204 # mtn >= 0.45:
205 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
205 # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
206 certlist = re.split('\n\n key ["\[]', certlist)
206 certlist = re.split('\n\n key ["\[]', certlist)
207 for e in certlist:
207 for e in certlist:
208 m = self.cert_re.match(e)
208 m = self.cert_re.match(e)
209 if m:
209 if m:
210 name, value = m.groups()
210 name, value = m.groups()
211 value = value.replace(r'\"', '"')
211 value = value.replace(r'\"', '"')
212 value = value.replace(r'\\', '\\')
212 value = value.replace(r'\\', '\\')
213 certs[name] = value
213 certs[name] = value
214 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
214 # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
215 # and all times are stored in UTC
215 # and all times are stored in UTC
216 certs["date"] = certs["date"].split('.')[0] + " UTC"
216 certs["date"] = certs["date"].split('.')[0] + " UTC"
217 return certs
217 return certs
218
218
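A quick check of the date normalization above, using the sample value from the comment:

date = "2005-02-05T09:39:12.364306"   # monotone date cert with subseconds
print(date.split('.')[0] + " UTC")    # -> 2005-02-05T09:39:12 UTC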
219 # implement the converter_source interface:
219 # implement the converter_source interface:
220
220
221 def getheads(self):
221 def getheads(self):
222 if not self.rev:
222 if not self.rev:
223 return self.mtnrun("leaves").splitlines()
223 return self.mtnrun("leaves").splitlines()
224 else:
224 else:
225 return [self.rev]
225 return [self.rev]
226
226
227 def getchanges(self, rev):
227 def getchanges(self, rev, full):
228 if full:
229 raise util.Abort(_("convert from monotone do not support --full"))
228 revision = self.mtnrun("get_revision", rev).split("\n\n")
230 revision = self.mtnrun("get_revision", rev).split("\n\n")
229 files = {}
231 files = {}
230 ignoremove = {}
232 ignoremove = {}
231 renameddirs = []
233 renameddirs = []
232 copies = {}
234 copies = {}
233 for e in revision:
235 for e in revision:
234 m = self.add_file_re.match(e)
236 m = self.add_file_re.match(e)
235 if m:
237 if m:
236 files[m.group(1)] = rev
238 files[m.group(1)] = rev
237 ignoremove[m.group(1)] = rev
239 ignoremove[m.group(1)] = rev
238 m = self.patch_re.match(e)
240 m = self.patch_re.match(e)
239 if m:
241 if m:
240 files[m.group(1)] = rev
242 files[m.group(1)] = rev
241 # Delete/rename is handled later when the convert engine
243 # Delete/rename is handled later when the convert engine
242 # discovers an IOError exception from getfile,
244 # discovers an IOError exception from getfile,
243 # but only if we add the "from" file to the list of changes.
245 # but only if we add the "from" file to the list of changes.
244 m = self.delete_re.match(e)
246 m = self.delete_re.match(e)
245 if m:
247 if m:
246 files[m.group(1)] = rev
248 files[m.group(1)] = rev
247 m = self.rename_re.match(e)
249 m = self.rename_re.match(e)
248 if m:
250 if m:
249 toname = m.group(2)
251 toname = m.group(2)
250 fromname = m.group(1)
252 fromname = m.group(1)
251 if self.mtnisfile(toname, rev):
253 if self.mtnisfile(toname, rev):
252 ignoremove[toname] = 1
254 ignoremove[toname] = 1
253 copies[toname] = fromname
255 copies[toname] = fromname
254 files[toname] = rev
256 files[toname] = rev
255 files[fromname] = rev
257 files[fromname] = rev
256 elif self.mtnisdir(toname, rev):
258 elif self.mtnisdir(toname, rev):
257 renameddirs.append((fromname, toname))
259 renameddirs.append((fromname, toname))
258
260
259 # Directory renames can be handled only once we have recorded
261 # Directory renames can be handled only once we have recorded
260 # all new files
262 # all new files
261 for fromdir, todir in renameddirs:
263 for fromdir, todir in renameddirs:
262 renamed = {}
264 renamed = {}
263 for tofile in self.files:
265 for tofile in self.files:
264 if tofile in ignoremove:
266 if tofile in ignoremove:
265 continue
267 continue
266 if tofile.startswith(todir + '/'):
268 if tofile.startswith(todir + '/'):
267 renamed[tofile] = fromdir + tofile[len(todir):]
269 renamed[tofile] = fromdir + tofile[len(todir):]
268 # Avoid chained moves like:
270 # Avoid chained moves like:
269 # d1(/a) => d3/d1(/a)
271 # d1(/a) => d3/d1(/a)
270 # d2 => d3
272 # d2 => d3
271 ignoremove[tofile] = 1
273 ignoremove[tofile] = 1
272 for tofile, fromfile in renamed.items():
274 for tofile, fromfile in renamed.items():
273 self.ui.debug (_("copying file in renamed directory "
275 self.ui.debug (_("copying file in renamed directory "
274 "from '%s' to '%s'")
276 "from '%s' to '%s'")
275 % (fromfile, tofile), '\n')
277 % (fromfile, tofile), '\n')
276 files[tofile] = rev
278 files[tofile] = rev
277 copies[tofile] = fromfile
279 copies[tofile] = fromfile
278 for fromfile in renamed.values():
280 for fromfile in renamed.values():
279 files[fromfile] = rev
281 files[fromfile] = rev
280
282
281 return (files.items(), copies)
283 return (files.items(), copies)
282
284
283 def getfile(self, name, rev):
285 def getfile(self, name, rev):
284 if not self.mtnisfile(name, rev):
286 if not self.mtnisfile(name, rev):
285 return None, None
287 return None, None
286 try:
288 try:
287 data = self.mtnrun("get_file_of", name, r=rev)
289 data = self.mtnrun("get_file_of", name, r=rev)
288 except Exception:
290 except Exception:
289 return None, None
291 return None, None
290 self.mtnloadmanifest(rev)
292 self.mtnloadmanifest(rev)
291 node, attr = self.files.get(name, (None, ""))
293 node, attr = self.files.get(name, (None, ""))
292 return data, attr
294 return data, attr
293
295
294 def getcommit(self, rev):
296 def getcommit(self, rev):
295 extra = {}
297 extra = {}
296 certs = self.mtngetcerts(rev)
298 certs = self.mtngetcerts(rev)
297 if certs.get('suspend') == certs["branch"]:
299 if certs.get('suspend') == certs["branch"]:
298 extra['close'] = '1'
300 extra['close'] = '1'
299 return commit(
301 return commit(
300 author=certs["author"],
302 author=certs["author"],
301 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
303 date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
302 desc=certs["changelog"],
304 desc=certs["changelog"],
303 rev=rev,
305 rev=rev,
304 parents=self.mtnrun("parents", rev).splitlines(),
306 parents=self.mtnrun("parents", rev).splitlines(),
305 branch=certs["branch"],
307 branch=certs["branch"],
306 extra=extra)
308 extra=extra)
307
309
308 def gettags(self):
310 def gettags(self):
309 tags = {}
311 tags = {}
310 for e in self.mtnrun("tags").split("\n\n"):
312 for e in self.mtnrun("tags").split("\n\n"):
311 m = self.tag_re.match(e)
313 m = self.tag_re.match(e)
312 if m:
314 if m:
313 tags[m.group(1)] = m.group(2)
315 tags[m.group(1)] = m.group(2)
314 return tags
316 return tags
315
317
316 def getchangedfiles(self, rev, i):
318 def getchangedfiles(self, rev, i):
317 # This function is only needed to support --filemap
319 # This function is only needed to support --filemap
318 # ... and we don't support that
320 # ... and we don't support that
319 raise NotImplementedError
321 raise NotImplementedError
320
322
321 def before(self):
323 def before(self):
322 # Check if we have a new enough version to use automate stdio
324 # Check if we have a new enough version to use automate stdio
323 version = 0.0
325 version = 0.0
324 try:
326 try:
325 versionstr = self.mtnrunsingle("interface_version")
327 versionstr = self.mtnrunsingle("interface_version")
326 version = float(versionstr)
328 version = float(versionstr)
327 except Exception:
329 except Exception:
328 raise util.Abort(_("unable to determine mtn automate interface "
330 raise util.Abort(_("unable to determine mtn automate interface "
329 "version"))
331 "version"))
330
332
331 if version >= 12.0:
333 if version >= 12.0:
332 self.automatestdio = True
334 self.automatestdio = True
333 self.ui.debug("mtn automate version %s - using automate stdio\n" %
335 self.ui.debug("mtn automate version %s - using automate stdio\n" %
334 version)
336 version)
335
337
336 # launch the long-running automate stdio process
338 # launch the long-running automate stdio process
337 self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
339 self.mtnwritefp, self.mtnreadfp = self._run2('automate', 'stdio',
338 '-d', self.path)
340 '-d', self.path)
339 # read the headers
341 # read the headers
340 read = self.mtnreadfp.readline()
342 read = self.mtnreadfp.readline()
341 if read != 'format-version: 2\n':
343 if read != 'format-version: 2\n':
342 raise util.Abort(_('mtn automate stdio header unexpected: %s')
344 raise util.Abort(_('mtn automate stdio header unexpected: %s')
343 % read)
345 % read)
344 while read != '\n':
346 while read != '\n':
345 read = self.mtnreadfp.readline()
347 read = self.mtnreadfp.readline()
346 if not read:
348 if not read:
347 raise util.Abort(_("failed to reach end of mtn automate "
349 raise util.Abort(_("failed to reach end of mtn automate "
348 "stdio headers"))
350 "stdio headers"))
349 else:
351 else:
350 self.ui.debug("mtn automate version %s - not using automate stdio "
352 self.ui.debug("mtn automate version %s - not using automate stdio "
351 "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
353 "(automate >= 12.0 - mtn >= 0.46 is needed)\n" % version)
352
354
353 def after(self):
355 def after(self):
354 if self.automatestdio:
356 if self.automatestdio:
355 self.mtnwritefp.close()
357 self.mtnwritefp.close()
356 self.mtnwritefp = None
358 self.mtnwritefp = None
357 self.mtnreadfp.close()
359 self.mtnreadfp.close()
358 self.mtnreadfp = None
360 self.mtnreadfp = None
359
361
@@ -1,205 +1,207 b''
1 # Perforce source for convert extension.
1 # Perforce source for convert extension.
2 #
2 #
3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
3 # Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from mercurial import util
8 from mercurial import util
9 from mercurial.i18n import _
9 from mercurial.i18n import _
10
10
11 from common import commit, converter_source, checktool, NoRepo
11 from common import commit, converter_source, checktool, NoRepo
12 import marshal
12 import marshal
13 import re
13 import re
14
14
15 def loaditer(f):
15 def loaditer(f):
16 "Yield the dictionary objects generated by p4"
16 "Yield the dictionary objects generated by p4"
17 try:
17 try:
18 while True:
18 while True:
19 d = marshal.load(f)
19 d = marshal.load(f)
20 if not d:
20 if not d:
21 break
21 break
22 yield d
22 yield d
23 except EOFError:
23 except EOFError:
24 pass
24 pass
25
25
26 class p4_source(converter_source):
26 class p4_source(converter_source):
27 def __init__(self, ui, path, rev=None):
27 def __init__(self, ui, path, rev=None):
28 super(p4_source, self).__init__(ui, path, rev=rev)
28 super(p4_source, self).__init__(ui, path, rev=rev)
29
29
30 if "/" in path and not path.startswith('//'):
30 if "/" in path and not path.startswith('//'):
31 raise NoRepo(_('%s does not look like a P4 repository') % path)
31 raise NoRepo(_('%s does not look like a P4 repository') % path)
32
32
33 checktool('p4', abort=False)
33 checktool('p4', abort=False)
34
34
35 self.p4changes = {}
35 self.p4changes = {}
36 self.heads = {}
36 self.heads = {}
37 self.changeset = {}
37 self.changeset = {}
38 self.files = {}
38 self.files = {}
39 self.tags = {}
39 self.tags = {}
40 self.lastbranch = {}
40 self.lastbranch = {}
41 self.parent = {}
41 self.parent = {}
42 self.encoding = "latin_1"
42 self.encoding = "latin_1"
43 self.depotname = {} # mapping from local name to depot name
43 self.depotname = {} # mapping from local name to depot name
44 self.re_type = re.compile(
44 self.re_type = re.compile(
45 "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
45 "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
46 "(\+\w+)?$")
46 "(\+\w+)?$")
47 self.re_keywords = re.compile(
47 self.re_keywords = re.compile(
48 r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
48 r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
49 r":[^$\n]*\$")
49 r":[^$\n]*\$")
50 self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
50 self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")
51
51
52 self._parse(ui, path)
52 self._parse(ui, path)
53
53
54 def _parse_view(self, path):
54 def _parse_view(self, path):
55 "Read changes affecting the path"
55 "Read changes affecting the path"
56 cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
56 cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
57 stdout = util.popen(cmd, mode='rb')
57 stdout = util.popen(cmd, mode='rb')
58 for d in loaditer(stdout):
58 for d in loaditer(stdout):
59 c = d.get("change", None)
59 c = d.get("change", None)
60 if c:
60 if c:
61 self.p4changes[c] = True
61 self.p4changes[c] = True
62
62
63 def _parse(self, ui, path):
63 def _parse(self, ui, path):
64 "Prepare list of P4 filenames and revisions to import"
64 "Prepare list of P4 filenames and revisions to import"
65 ui.status(_('reading p4 views\n'))
65 ui.status(_('reading p4 views\n'))
66
66
67 # read client spec or view
67 # read client spec or view
68 if "/" in path:
68 if "/" in path:
69 self._parse_view(path)
69 self._parse_view(path)
70 if path.startswith("//") and path.endswith("/..."):
70 if path.startswith("//") and path.endswith("/..."):
71 views = {path[:-3]:""}
71 views = {path[:-3]:""}
72 else:
72 else:
73 views = {"//": ""}
73 views = {"//": ""}
74 else:
74 else:
75 cmd = 'p4 -G client -o %s' % util.shellquote(path)
75 cmd = 'p4 -G client -o %s' % util.shellquote(path)
76 clientspec = marshal.load(util.popen(cmd, mode='rb'))
76 clientspec = marshal.load(util.popen(cmd, mode='rb'))
77
77
78 views = {}
78 views = {}
79 for client in clientspec:
79 for client in clientspec:
80 if client.startswith("View"):
80 if client.startswith("View"):
81 sview, cview = clientspec[client].split()
81 sview, cview = clientspec[client].split()
82 self._parse_view(sview)
82 self._parse_view(sview)
83 if sview.endswith("...") and cview.endswith("..."):
83 if sview.endswith("...") and cview.endswith("..."):
84 sview = sview[:-3]
84 sview = sview[:-3]
85 cview = cview[:-3]
85 cview = cview[:-3]
86 cview = cview[2:]
86 cview = cview[2:]
87 cview = cview[cview.find("/") + 1:]
87 cview = cview[cview.find("/") + 1:]
88 views[sview] = cview
88 views[sview] = cview
89
89
90 # list of changes that affect our source files
90 # list of changes that affect our source files
91 self.p4changes = self.p4changes.keys()
91 self.p4changes = self.p4changes.keys()
92 self.p4changes.sort(key=int)
92 self.p4changes.sort(key=int)
93
93
94 # list with depot pathnames, longest first
94 # list with depot pathnames, longest first
95 vieworder = views.keys()
95 vieworder = views.keys()
96 vieworder.sort(key=len, reverse=True)
96 vieworder.sort(key=len, reverse=True)
97
97
98 # handle revision limiting
98 # handle revision limiting
99 startrev = self.ui.config('convert', 'p4.startrev', default=0)
99 startrev = self.ui.config('convert', 'p4.startrev', default=0)
100 self.p4changes = [x for x in self.p4changes
100 self.p4changes = [x for x in self.p4changes
101 if ((not startrev or int(x) >= int(startrev)) and
101 if ((not startrev or int(x) >= int(startrev)) and
102 (not self.rev or int(x) <= int(self.rev)))]
102 (not self.rev or int(x) <= int(self.rev)))]
103
103
104 # now read the full changelists to get the list of file revisions
104 # now read the full changelists to get the list of file revisions
105 ui.status(_('collecting p4 changelists\n'))
105 ui.status(_('collecting p4 changelists\n'))
106 lastid = None
106 lastid = None
107 for change in self.p4changes:
107 for change in self.p4changes:
108 cmd = "p4 -G describe -s %s" % change
108 cmd = "p4 -G describe -s %s" % change
109 stdout = util.popen(cmd, mode='rb')
109 stdout = util.popen(cmd, mode='rb')
110 d = marshal.load(stdout)
110 d = marshal.load(stdout)
111 desc = self.recode(d.get("desc", ""))
111 desc = self.recode(d.get("desc", ""))
112 shortdesc = desc.split("\n", 1)[0]
112 shortdesc = desc.split("\n", 1)[0]
113 t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
113 t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
114 ui.status(util.ellipsis(t, 80) + '\n')
114 ui.status(util.ellipsis(t, 80) + '\n')
115
115
116 if lastid:
116 if lastid:
117 parents = [lastid]
117 parents = [lastid]
118 else:
118 else:
119 parents = []
119 parents = []
120
120
121 date = (int(d["time"]), 0) # timezone not set
121 date = (int(d["time"]), 0) # timezone not set
122 c = commit(author=self.recode(d["user"]),
122 c = commit(author=self.recode(d["user"]),
123 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
123 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
124 parents=parents, desc=desc, branch='',
124 parents=parents, desc=desc, branch='',
125 extra={"p4": change})
125 extra={"p4": change})
126
126
127 files = []
127 files = []
128 i = 0
128 i = 0
129 while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
129 while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
130 oldname = d["depotFile%d" % i]
130 oldname = d["depotFile%d" % i]
131 filename = None
131 filename = None
132 for v in vieworder:
132 for v in vieworder:
133 if oldname.startswith(v):
133 if oldname.startswith(v):
134 filename = views[v] + oldname[len(v):]
134 filename = views[v] + oldname[len(v):]
135 break
135 break
136 if filename:
136 if filename:
137 files.append((filename, d["rev%d" % i]))
137 files.append((filename, d["rev%d" % i]))
138 self.depotname[filename] = oldname
138 self.depotname[filename] = oldname
139 i += 1
139 i += 1
140 self.changeset[change] = c
140 self.changeset[change] = c
141 self.files[change] = files
141 self.files[change] = files
142 lastid = change
142 lastid = change
143
143
144 if lastid:
144 if lastid:
145 self.heads = [lastid]
145 self.heads = [lastid]
146
146
147 def getheads(self):
147 def getheads(self):
148 return self.heads
148 return self.heads
149
149
150 def getfile(self, name, rev):
150 def getfile(self, name, rev):
151 cmd = 'p4 -G print %s' \
151 cmd = 'p4 -G print %s' \
152 % util.shellquote("%s#%s" % (self.depotname[name], rev))
152 % util.shellquote("%s#%s" % (self.depotname[name], rev))
153 stdout = util.popen(cmd, mode='rb')
153 stdout = util.popen(cmd, mode='rb')
154
154
155 mode = None
155 mode = None
156 contents = ""
156 contents = ""
157 keywords = None
157 keywords = None
158
158
159 for d in loaditer(stdout):
159 for d in loaditer(stdout):
160 code = d["code"]
160 code = d["code"]
161 data = d.get("data")
161 data = d.get("data")
162
162
163 if code == "error":
163 if code == "error":
164 raise IOError(d["generic"], data)
164 raise IOError(d["generic"], data)
165
165
166 elif code == "stat":
166 elif code == "stat":
167 if d.get("action") == "purge":
167 if d.get("action") == "purge":
168 return None, None
168 return None, None
169 p4type = self.re_type.match(d["type"])
169 p4type = self.re_type.match(d["type"])
170 if p4type:
170 if p4type:
171 mode = ""
171 mode = ""
172 flags = (p4type.group(1) or "") + (p4type.group(3) or "")
172 flags = (p4type.group(1) or "") + (p4type.group(3) or "")
173 if "x" in flags:
173 if "x" in flags:
174 mode = "x"
174 mode = "x"
175 if p4type.group(2) == "symlink":
175 if p4type.group(2) == "symlink":
176 mode = "l"
176 mode = "l"
177 if "ko" in flags:
177 if "ko" in flags:
178 keywords = self.re_keywords_old
178 keywords = self.re_keywords_old
179 elif "k" in flags:
179 elif "k" in flags:
180 keywords = self.re_keywords
180 keywords = self.re_keywords
181
181
182 elif code == "text" or code == "binary":
182 elif code == "text" or code == "binary":
183 contents += data
183 contents += data
184
184
185 if mode is None:
185 if mode is None:
186 return None, None
186 return None, None
187
187
188 if keywords:
188 if keywords:
189 contents = keywords.sub("$\\1$", contents)
189 contents = keywords.sub("$\\1$", contents)
190 if mode == "l" and contents.endswith("\n"):
190 if mode == "l" and contents.endswith("\n"):
191 contents = contents[:-1]
191 contents = contents[:-1]
192
192
193 return contents, mode
193 return contents, mode
194
194
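To make the keyword collapsing in getfile() concrete, here is the newer-style regex from the constructor applied to a hypothetical expanded $Id$ line (the depot path is invented):

import re
re_keywords = re.compile(
    r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
    r":[^$\n]*\$")
line = "# $Id: //depot/main/foo.py#3 $"
print(re_keywords.sub("$\\1$", line))   # -> # $Id$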
195 def getchanges(self, rev):
195 def getchanges(self, rev, full):
196 if full:
197 raise util.Abort(_("convert from p4 does not support --full"))
196 return self.files[rev], {}
198 return self.files[rev], {}
197
199
198 def getcommit(self, rev):
200 def getcommit(self, rev):
199 return self.changeset[rev]
201 return self.changeset[rev]
200
202
201 def gettags(self):
203 def gettags(self):
202 return self.tags
204 return self.tags
203
205
204 def getchangedfiles(self, rev, i):
206 def getchangedfiles(self, rev, i):
205 return sorted([x[0] for x in self.files[rev]])
207 return sorted([x[0] for x in self.files[rev]])
@@ -1,1308 +1,1310 b''
1 # Subversion 1.4/1.5 Python API backend
1 # Subversion 1.4/1.5 Python API backend
2 #
2 #
3 # Copyright(C) 2007 Daniel Holth et al
3 # Copyright(C) 2007 Daniel Holth et al
4
4
5 import os, re, sys, tempfile, urllib, urllib2
5 import os, re, sys, tempfile, urllib, urllib2
6 import xml.dom.minidom
6 import xml.dom.minidom
7 import cPickle as pickle
7 import cPickle as pickle
8
8
9 from mercurial import strutil, scmutil, util, encoding
9 from mercurial import strutil, scmutil, util, encoding
10 from mercurial.i18n import _
10 from mercurial.i18n import _
11
11
12 propertycache = util.propertycache
12 propertycache = util.propertycache
13
13
14 # Subversion stuff. Works best with very recent Python SVN bindings
14 # Subversion stuff. Works best with very recent Python SVN bindings
15 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
15 # e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
16 # these bindings.
16 # these bindings.
17
17
18 from cStringIO import StringIO
18 from cStringIO import StringIO
19
19
20 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
20 from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
21 from common import commandline, converter_source, converter_sink, mapfile
21 from common import commandline, converter_source, converter_sink, mapfile
22 from common import makedatetimestamp
22 from common import makedatetimestamp
23
23
24 try:
24 try:
25 from svn.core import SubversionException, Pool
25 from svn.core import SubversionException, Pool
26 import svn
26 import svn
27 import svn.client
27 import svn.client
28 import svn.core
28 import svn.core
29 import svn.ra
29 import svn.ra
30 import svn.delta
30 import svn.delta
31 import transport
31 import transport
32 import warnings
32 import warnings
33 warnings.filterwarnings('ignore',
33 warnings.filterwarnings('ignore',
34 module='svn.core',
34 module='svn.core',
35 category=DeprecationWarning)
35 category=DeprecationWarning)
36
36
37 except ImportError:
37 except ImportError:
38 svn = None
38 svn = None
39
39
40 class SvnPathNotFound(Exception):
40 class SvnPathNotFound(Exception):
41 pass
41 pass
42
42
43 def revsplit(rev):
43 def revsplit(rev):
44 """Parse a revision string and return (uuid, path, revnum).
44 """Parse a revision string and return (uuid, path, revnum).
45 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
45 >>> revsplit('svn:a2147622-4a9f-4db4-a8d3-13562ff547b2'
46 ... '/proj%20B/mytrunk/mytrunk@1')
46 ... '/proj%20B/mytrunk/mytrunk@1')
47 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
47 ('a2147622-4a9f-4db4-a8d3-13562ff547b2', '/proj%20B/mytrunk/mytrunk', 1)
48 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
48 >>> revsplit('svn:8af66a51-67f5-4354-b62c-98d67cc7be1d@1')
49 ('', '', 1)
49 ('', '', 1)
50 >>> revsplit('@7')
50 >>> revsplit('@7')
51 ('', '', 7)
51 ('', '', 7)
52 >>> revsplit('7')
52 >>> revsplit('7')
53 ('', '', 0)
53 ('', '', 0)
54 >>> revsplit('bad')
54 >>> revsplit('bad')
55 ('', '', 0)
55 ('', '', 0)
56 """
56 """
57 parts = rev.rsplit('@', 1)
57 parts = rev.rsplit('@', 1)
58 revnum = 0
58 revnum = 0
59 if len(parts) > 1:
59 if len(parts) > 1:
60 revnum = int(parts[1])
60 revnum = int(parts[1])
61 parts = parts[0].split('/', 1)
61 parts = parts[0].split('/', 1)
62 uuid = ''
62 uuid = ''
63 mod = ''
63 mod = ''
64 if len(parts) > 1 and parts[0].startswith('svn:'):
64 if len(parts) > 1 and parts[0].startswith('svn:'):
65 uuid = parts[0][4:]
65 uuid = parts[0][4:]
66 mod = '/' + parts[1]
66 mod = '/' + parts[1]
67 return uuid, mod, revnum
67 return uuid, mod, revnum
68
68
69 def quote(s):
69 def quote(s):
70 # As of svn 1.7, many svn calls expect "canonical" paths. In
70 # As of svn 1.7, many svn calls expect "canonical" paths. In
71 # theory, we should call svn.core.*canonicalize() on all paths
71 # theory, we should call svn.core.*canonicalize() on all paths
72 # before passing them to the API. Instead, we assume the base url
72 # before passing them to the API. Instead, we assume the base url
73 # is canonical and copy the behaviour of svn URL encoding function
73 # is canonical and copy the behaviour of svn URL encoding function
74 # so we can extend it safely with new components. The "safe"
74 # so we can extend it safely with new components. The "safe"
75 # characters were taken from the "svn_uri__char_validity" table in
75 # characters were taken from the "svn_uri__char_validity" table in
76 # libsvn_subr/path.c.
76 # libsvn_subr/path.c.
77 return urllib.quote(s, "!$&'()*+,-./:=@_~")
77 return urllib.quote(s, "!$&'()*+,-./:=@_~")
78
78
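A small sanity check of the quoting above; the path fragment echoes the one in the revsplit() doctest earlier in this file:

import urllib
safe = "!$&'()*+,-./:=@_~"                     # same safe set as quote()
print(urllib.quote('/proj B/mytrunk', safe))   # -> /proj%20B/mytrunk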
79 def geturl(path):
79 def geturl(path):
80 try:
80 try:
81 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
81 return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
82 except SubversionException:
82 except SubversionException:
83 # svn.client.url_from_path() fails with local repositories
83 # svn.client.url_from_path() fails with local repositories
84 pass
84 pass
85 if os.path.isdir(path):
85 if os.path.isdir(path):
86 path = os.path.normpath(os.path.abspath(path))
86 path = os.path.normpath(os.path.abspath(path))
87 if os.name == 'nt':
87 if os.name == 'nt':
88 path = '/' + util.normpath(path)
88 path = '/' + util.normpath(path)
89 # Module URL is later compared with the repository URL returned
89 # Module URL is later compared with the repository URL returned
90 # by svn API, which is UTF-8.
90 # by svn API, which is UTF-8.
91 path = encoding.tolocal(path)
91 path = encoding.tolocal(path)
92 path = 'file://%s' % quote(path)
92 path = 'file://%s' % quote(path)
93 return svn.core.svn_path_canonicalize(path)
93 return svn.core.svn_path_canonicalize(path)
94
94
95 def optrev(number):
95 def optrev(number):
96 optrev = svn.core.svn_opt_revision_t()
96 optrev = svn.core.svn_opt_revision_t()
97 optrev.kind = svn.core.svn_opt_revision_number
97 optrev.kind = svn.core.svn_opt_revision_number
98 optrev.value.number = number
98 optrev.value.number = number
99 return optrev
99 return optrev
100
100
101 class changedpath(object):
101 class changedpath(object):
102 def __init__(self, p):
102 def __init__(self, p):
103 self.copyfrom_path = p.copyfrom_path
103 self.copyfrom_path = p.copyfrom_path
104 self.copyfrom_rev = p.copyfrom_rev
104 self.copyfrom_rev = p.copyfrom_rev
105 self.action = p.action
105 self.action = p.action
106
106
107 def get_log_child(fp, url, paths, start, end, limit=0,
107 def get_log_child(fp, url, paths, start, end, limit=0,
108 discover_changed_paths=True, strict_node_history=False):
108 discover_changed_paths=True, strict_node_history=False):
109 protocol = -1
109 protocol = -1
110 def receiver(orig_paths, revnum, author, date, message, pool):
110 def receiver(orig_paths, revnum, author, date, message, pool):
111 paths = {}
111 paths = {}
112 if orig_paths is not None:
112 if orig_paths is not None:
113 for k, v in orig_paths.iteritems():
113 for k, v in orig_paths.iteritems():
114 paths[k] = changedpath(v)
114 paths[k] = changedpath(v)
115 pickle.dump((paths, revnum, author, date, message),
115 pickle.dump((paths, revnum, author, date, message),
116 fp, protocol)
116 fp, protocol)
117
117
118 try:
118 try:
119 # Use an ra of our own so that our parent can consume
119 # Use an ra of our own so that our parent can consume
120 # our results without confusing the server.
120 # our results without confusing the server.
121 t = transport.SvnRaTransport(url=url)
121 t = transport.SvnRaTransport(url=url)
122 svn.ra.get_log(t.ra, paths, start, end, limit,
122 svn.ra.get_log(t.ra, paths, start, end, limit,
123 discover_changed_paths,
123 discover_changed_paths,
124 strict_node_history,
124 strict_node_history,
125 receiver)
125 receiver)
126 except IOError:
126 except IOError:
127 # Caller may interrupt the iteration
127 # Caller may interrupt the iteration
128 pickle.dump(None, fp, protocol)
128 pickle.dump(None, fp, protocol)
129 except Exception, inst:
129 except Exception, inst:
130 pickle.dump(str(inst), fp, protocol)
130 pickle.dump(str(inst), fp, protocol)
131 else:
131 else:
132 pickle.dump(None, fp, protocol)
132 pickle.dump(None, fp, protocol)
133 fp.close()
133 fp.close()
134 # With large history, cleanup process goes crazy and suddenly
134 # With large history, cleanup process goes crazy and suddenly
135 # consumes *huge* amount of memory. The output file being closed,
135 # consumes *huge* amount of memory. The output file being closed,
136 # there is no need for clean termination.
136 # there is no need for clean termination.
137 os._exit(0)
137 os._exit(0)
138
138
139 def debugsvnlog(ui, **opts):
139 def debugsvnlog(ui, **opts):
140 """Fetch SVN log in a subprocess and channel them back to parent to
140 """Fetch SVN log in a subprocess and channel them back to parent to
141 avoid memory collection issues.
141 avoid memory collection issues.
142 """
142 """
143 if svn is None:
143 if svn is None:
144 raise util.Abort(_('debugsvnlog could not load Subversion python '
144 raise util.Abort(_('debugsvnlog could not load Subversion python '
145 'bindings'))
145 'bindings'))
146
146
147 util.setbinary(sys.stdin)
147 util.setbinary(sys.stdin)
148 util.setbinary(sys.stdout)
148 util.setbinary(sys.stdout)
149 args = decodeargs(sys.stdin.read())
149 args = decodeargs(sys.stdin.read())
150 get_log_child(sys.stdout, *args)
150 get_log_child(sys.stdout, *args)
151
151
152 class logstream(object):
152 class logstream(object):
153 """Interruptible revision log iterator."""
153 """Interruptible revision log iterator."""
154 def __init__(self, stdout):
154 def __init__(self, stdout):
155 self._stdout = stdout
155 self._stdout = stdout
156
156
157 def __iter__(self):
157 def __iter__(self):
158 while True:
158 while True:
159 try:
159 try:
160 entry = pickle.load(self._stdout)
160 entry = pickle.load(self._stdout)
161 except EOFError:
161 except EOFError:
162 raise util.Abort(_('Mercurial failed to run itself, check'
162 raise util.Abort(_('Mercurial failed to run itself, check'
163 ' hg executable is in PATH'))
163 ' hg executable is in PATH'))
164 try:
164 try:
165 orig_paths, revnum, author, date, message = entry
165 orig_paths, revnum, author, date, message = entry
166 except (TypeError, ValueError):
166 except (TypeError, ValueError):
167 if entry is None:
167 if entry is None:
168 break
168 break
169 raise util.Abort(_("log stream exception '%s'") % entry)
169 raise util.Abort(_("log stream exception '%s'") % entry)
170 yield entry
170 yield entry
171
171
172 def close(self):
172 def close(self):
173 if self._stdout:
173 if self._stdout:
174 self._stdout.close()
174 self._stdout.close()
175 self._stdout = None
175 self._stdout = None
176
176
177 class directlogstream(list):
177 class directlogstream(list):
178 """Direct revision log iterator.
178 """Direct revision log iterator.
179 This can be used for debugging and development but it will probably leak
179 This can be used for debugging and development but it will probably leak
180 memory and is not suitable for real conversions."""
180 memory and is not suitable for real conversions."""
181 def __init__(self, url, paths, start, end, limit=0,
181 def __init__(self, url, paths, start, end, limit=0,
182 discover_changed_paths=True, strict_node_history=False):
182 discover_changed_paths=True, strict_node_history=False):
183
183
184 def receiver(orig_paths, revnum, author, date, message, pool):
184 def receiver(orig_paths, revnum, author, date, message, pool):
185 paths = {}
185 paths = {}
186 if orig_paths is not None:
186 if orig_paths is not None:
187 for k, v in orig_paths.iteritems():
187 for k, v in orig_paths.iteritems():
188 paths[k] = changedpath(v)
188 paths[k] = changedpath(v)
189 self.append((paths, revnum, author, date, message))
189 self.append((paths, revnum, author, date, message))
190
190
191 # Use an ra of our own so that our parent can consume
191 # Use an ra of our own so that our parent can consume
192 # our results without confusing the server.
192 # our results without confusing the server.
193 t = transport.SvnRaTransport(url=url)
193 t = transport.SvnRaTransport(url=url)
194 svn.ra.get_log(t.ra, paths, start, end, limit,
194 svn.ra.get_log(t.ra, paths, start, end, limit,
195 discover_changed_paths,
195 discover_changed_paths,
196 strict_node_history,
196 strict_node_history,
197 receiver)
197 receiver)
198
198
199 def close(self):
199 def close(self):
200 pass
200 pass
201
201
202 # Check to see if the given path is a local Subversion repo. Verify this by
202 # Check to see if the given path is a local Subversion repo. Verify this by
203 # looking for several svn-specific files and directories in the given
203 # looking for several svn-specific files and directories in the given
204 # directory.
204 # directory.
205 def filecheck(ui, path, proto):
205 def filecheck(ui, path, proto):
206 for x in ('locks', 'hooks', 'format', 'db'):
206 for x in ('locks', 'hooks', 'format', 'db'):
207 if not os.path.exists(os.path.join(path, x)):
207 if not os.path.exists(os.path.join(path, x)):
208 return False
208 return False
209 return True
209 return True
210
210
211 # Check to see if a given path is the root of an svn repo over http. We verify
211 # Check to see if a given path is the root of an svn repo over http. We verify
212 # this by requesting a version-controlled URL we know can't exist and looking
212 # this by requesting a version-controlled URL we know can't exist and looking
213 # for the svn-specific "not found" XML.
213 # for the svn-specific "not found" XML.
214 def httpcheck(ui, path, proto):
214 def httpcheck(ui, path, proto):
215 try:
215 try:
216 opener = urllib2.build_opener()
216 opener = urllib2.build_opener()
217 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
217 rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
218 data = rsp.read()
218 data = rsp.read()
219 except urllib2.HTTPError, inst:
219 except urllib2.HTTPError, inst:
220 if inst.code != 404:
220 if inst.code != 404:
221 # Except for 404 we cannot know for sure this is not an svn repo
221 # Except for 404 we cannot know for sure this is not an svn repo
222 ui.warn(_('svn: cannot probe remote repository, assume it could '
222 ui.warn(_('svn: cannot probe remote repository, assume it could '
223 'be a subversion repository. Use --source-type if you '
223 'be a subversion repository. Use --source-type if you '
224 'know better.\n'))
224 'know better.\n'))
225 return True
225 return True
226 data = inst.fp.read()
226 data = inst.fp.read()
227 except Exception:
227 except Exception:
228 # Could be urllib2.URLError if the URL is invalid or anything else.
228 # Could be urllib2.URLError if the URL is invalid or anything else.
229 return False
229 return False
230 return '<m:human-readable errcode="160013">' in data
230 return '<m:human-readable errcode="160013">' in data
231
231
232 protomap = {'http': httpcheck,
232 protomap = {'http': httpcheck,
233 'https': httpcheck,
233 'https': httpcheck,
234 'file': filecheck,
234 'file': filecheck,
235 }
235 }
236 def issvnurl(ui, url):
236 def issvnurl(ui, url):
237 try:
237 try:
238 proto, path = url.split('://', 1)
238 proto, path = url.split('://', 1)
239 if proto == 'file':
239 if proto == 'file':
240 if (os.name == 'nt' and path[:1] == '/' and path[1:2].isalpha()
240 if (os.name == 'nt' and path[:1] == '/' and path[1:2].isalpha()
241 and path[2:6].lower() == '%3a/'):
241 and path[2:6].lower() == '%3a/'):
242 path = path[:2] + ':/' + path[6:]
242 path = path[:2] + ':/' + path[6:]
243 path = urllib.url2pathname(path)
243 path = urllib.url2pathname(path)
244 except ValueError:
244 except ValueError:
245 proto = 'file'
245 proto = 'file'
246 path = os.path.abspath(url)
246 path = os.path.abspath(url)
247 if proto == 'file':
247 if proto == 'file':
248 path = util.pconvert(path)
248 path = util.pconvert(path)
249 check = protomap.get(proto, lambda *args: False)
249 check = protomap.get(proto, lambda *args: False)
250 while '/' in path:
250 while '/' in path:
251 if check(ui, path, proto):
251 if check(ui, path, proto):
252 return True
252 return True
253 path = path.rsplit('/', 1)[0]
253 path = path.rsplit('/', 1)[0]
254 return False
254 return False
255
255
256 # SVN conversion code stolen from bzr-svn and tailor
256 # SVN conversion code stolen from bzr-svn and tailor
257 #
257 #
258 # Subversion looks like a versioned filesystem, branches structures
258 # Subversion looks like a versioned filesystem, branches structures
259 # are defined by conventions and not enforced by the tool. First,
259 # are defined by conventions and not enforced by the tool. First,
260 # we define the potential branches (modules) as "trunk" and "branches"
260 # we define the potential branches (modules) as "trunk" and "branches"
261 # children directories. Revisions are then identified by their
261 # children directories. Revisions are then identified by their
262 # module and revision number (and a repository identifier).
262 # module and revision number (and a repository identifier).
263 #
263 #
264 # The revision graph is really a tree (or a forest). By default, a
264 # The revision graph is really a tree (or a forest). By default, a
265 # revision parent is the previous revision in the same module. If the
265 # revision parent is the previous revision in the same module. If the
266 # module directory is copied/moved from another module then the
266 # module directory is copied/moved from another module then the
267 # revision is the module root and its parent the source revision in
267 # revision is the module root and its parent the source revision in
268 # the parent module. A revision has at most one parent.
268 # the parent module. A revision has at most one parent.
269 #
269 #
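As the comment block above notes, a converted revision is identified by repository UUID, module and revision number; throughout this file that triple is flattened into a string of the form svn:<uuid><module>@<revnum> (see revid/revnum further down and the revsplit helper used everywhere). A small sketch of the encoding, with a toy parse_revid in place of the real revsplit:

    def make_revid(uuid, module, revnum):
        # module is '' or a path starting with '/', never ending with '/'
        return 'svn:%s%s@%s' % (uuid, module, revnum)

    def parse_revid(revid):
        # Toy inverse of make_revid: split into (uuid, module, revnum).
        head, revnum = revid.rsplit('@', 1)
        uuid = head[len('svn:'):len('svn:') + 36]
        module = head[len('svn:') + 36:]
        return uuid, module, int(revnum)

    rid = make_revid('13f79535-47bb-0310-9956-ffa450edef68', '/trunk', 42)
    assert parse_revid(rid) == ('13f79535-47bb-0310-9956-ffa450edef68', '/trunk', 42)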
270 class svn_source(converter_source):
270 class svn_source(converter_source):
271 def __init__(self, ui, url, rev=None):
271 def __init__(self, ui, url, rev=None):
272 super(svn_source, self).__init__(ui, url, rev=rev)
272 super(svn_source, self).__init__(ui, url, rev=rev)
273
273
274 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
274 if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
275 (os.path.exists(url) and
275 (os.path.exists(url) and
276 os.path.exists(os.path.join(url, '.svn'))) or
276 os.path.exists(os.path.join(url, '.svn'))) or
277 issvnurl(ui, url)):
277 issvnurl(ui, url)):
278 raise NoRepo(_("%s does not look like a Subversion repository")
278 raise NoRepo(_("%s does not look like a Subversion repository")
279 % url)
279 % url)
280 if svn is None:
280 if svn is None:
281 raise MissingTool(_('could not load Subversion python bindings'))
281 raise MissingTool(_('could not load Subversion python bindings'))
282
282
283 try:
283 try:
284 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
284 version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
285 if version < (1, 4):
285 if version < (1, 4):
286 raise MissingTool(_('Subversion python bindings %d.%d found, '
286 raise MissingTool(_('Subversion python bindings %d.%d found, '
287 '1.4 or later required') % version)
287 '1.4 or later required') % version)
288 except AttributeError:
288 except AttributeError:
289 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
289 raise MissingTool(_('Subversion python bindings are too old, 1.4 '
290 'or later required'))
290 'or later required'))
291
291
292 self.lastrevs = {}
292 self.lastrevs = {}
293
293
294 latest = None
294 latest = None
295 try:
295 try:
296 # Support file://path@rev syntax. Useful e.g. to convert
296 # Support file://path@rev syntax. Useful e.g. to convert
297 # deleted branches.
297 # deleted branches.
298 at = url.rfind('@')
298 at = url.rfind('@')
299 if at >= 0:
299 if at >= 0:
300 latest = int(url[at + 1:])
300 latest = int(url[at + 1:])
301 url = url[:at]
301 url = url[:at]
302 except ValueError:
302 except ValueError:
303 pass
303 pass
304 self.url = geturl(url)
304 self.url = geturl(url)
305 self.encoding = 'UTF-8' # Subversion is always nominally UTF-8
305 self.encoding = 'UTF-8' # Subversion is always nominally UTF-8
306 try:
306 try:
307 self.transport = transport.SvnRaTransport(url=self.url)
307 self.transport = transport.SvnRaTransport(url=self.url)
308 self.ra = self.transport.ra
308 self.ra = self.transport.ra
309 self.ctx = self.transport.client
309 self.ctx = self.transport.client
310 self.baseurl = svn.ra.get_repos_root(self.ra)
310 self.baseurl = svn.ra.get_repos_root(self.ra)
311 # Module is either empty or a repository path starting with
311 # Module is either empty or a repository path starting with
312 # a slash and not ending with a slash.
312 # a slash and not ending with a slash.
313 self.module = urllib.unquote(self.url[len(self.baseurl):])
313 self.module = urllib.unquote(self.url[len(self.baseurl):])
314 self.prevmodule = None
314 self.prevmodule = None
315 self.rootmodule = self.module
315 self.rootmodule = self.module
316 self.commits = {}
316 self.commits = {}
317 self.paths = {}
317 self.paths = {}
318 self.uuid = svn.ra.get_uuid(self.ra)
318 self.uuid = svn.ra.get_uuid(self.ra)
319 except SubversionException:
319 except SubversionException:
320 ui.traceback()
320 ui.traceback()
321 raise NoRepo(_("%s does not look like a Subversion repository")
321 raise NoRepo(_("%s does not look like a Subversion repository")
322 % self.url)
322 % self.url)
323
323
324 if rev:
324 if rev:
325 try:
325 try:
326 latest = int(rev)
326 latest = int(rev)
327 except ValueError:
327 except ValueError:
328 raise util.Abort(_('svn: revision %s is not an integer') % rev)
328 raise util.Abort(_('svn: revision %s is not an integer') % rev)
329
329
330 self.trunkname = self.ui.config('convert', 'svn.trunk',
330 self.trunkname = self.ui.config('convert', 'svn.trunk',
331 'trunk').strip('/')
331 'trunk').strip('/')
332 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
332 self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
333 try:
333 try:
334 self.startrev = int(self.startrev)
334 self.startrev = int(self.startrev)
335 if self.startrev < 0:
335 if self.startrev < 0:
336 self.startrev = 0
336 self.startrev = 0
337 except ValueError:
337 except ValueError:
338 raise util.Abort(_('svn: start revision %s is not an integer')
338 raise util.Abort(_('svn: start revision %s is not an integer')
339 % self.startrev)
339 % self.startrev)
340
340
341 try:
341 try:
342 self.head = self.latest(self.module, latest)
342 self.head = self.latest(self.module, latest)
343 except SvnPathNotFound:
343 except SvnPathNotFound:
344 self.head = None
344 self.head = None
345 if not self.head:
345 if not self.head:
346 raise util.Abort(_('no revision found in module %s')
346 raise util.Abort(_('no revision found in module %s')
347 % self.module)
347 % self.module)
348 self.last_changed = self.revnum(self.head)
348 self.last_changed = self.revnum(self.head)
349
349
350 self._changescache = (None, None)
350 self._changescache = (None, None)
351
351
352 if os.path.exists(os.path.join(url, '.svn/entries')):
352 if os.path.exists(os.path.join(url, '.svn/entries')):
353 self.wc = url
353 self.wc = url
354 else:
354 else:
355 self.wc = None
355 self.wc = None
356 self.convertfp = None
356 self.convertfp = None
357
357
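__init__ above peels an optional @<rev> suffix off file:// URLs so that deleted branches can still be converted; the suffix is only honoured when the part after the last '@' parses as an integer. A minimal sketch of that parsing, written outside the class with an illustrative helper name:

    def split_peg_rev(url):
        # Return (url, rev) where rev is the integer after the last '@',
        # or None when there is no numeric suffix.
        at = url.rfind('@')
        if at >= 0:
            try:
                return url[:at], int(url[at + 1:])
            except ValueError:
                pass
        return url, None

    assert split_peg_rev('file:///repo/branches/old@120') == \
        ('file:///repo/branches/old', 120)
    # an '@' inside the URL itself is left alone
    assert split_peg_rev('svn+ssh://user@host/repo') == \
        ('svn+ssh://user@host/repo', None)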
358 def setrevmap(self, revmap):
358 def setrevmap(self, revmap):
359 lastrevs = {}
359 lastrevs = {}
360 for revid in revmap.iterkeys():
360 for revid in revmap.iterkeys():
361 uuid, module, revnum = revsplit(revid)
361 uuid, module, revnum = revsplit(revid)
362 lastrevnum = lastrevs.setdefault(module, revnum)
362 lastrevnum = lastrevs.setdefault(module, revnum)
363 if revnum > lastrevnum:
363 if revnum > lastrevnum:
364 lastrevs[module] = revnum
364 lastrevs[module] = revnum
365 self.lastrevs = lastrevs
365 self.lastrevs = lastrevs
366
366
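setrevmap reduces the existing revision map to the highest Subversion revision already converted per module, which later lets the log fetching in getcommit stop early. A tiny sketch of that reduction over hypothetical revid strings, using a toy parser instead of revsplit:

    def last_converted(revids, parse):
        # parse maps a revid string to (uuid, module, revnum); here a toy
        # parser stands in for the real revsplit helper.
        lastrevs = {}
        for revid in revids:
            _uuid, module, revnum = parse(revid)
            if revnum > lastrevs.get(module, -1):
                lastrevs[module] = revnum
        return lastrevs

    toyparse = lambda s: ('uuid', s.split('@')[0], int(s.split('@')[1]))
    assert last_converted(['/trunk@3', '/trunk@7', '/branches/x@5'], toyparse) == \
        {'/trunk': 7, '/branches/x': 5}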
367 def exists(self, path, optrev):
367 def exists(self, path, optrev):
368 try:
368 try:
369 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
369 svn.client.ls(self.url.rstrip('/') + '/' + quote(path),
370 optrev, False, self.ctx)
370 optrev, False, self.ctx)
371 return True
371 return True
372 except SubversionException:
372 except SubversionException:
373 return False
373 return False
374
374
375 def getheads(self):
375 def getheads(self):
376
376
377 def isdir(path, revnum):
377 def isdir(path, revnum):
378 kind = self._checkpath(path, revnum)
378 kind = self._checkpath(path, revnum)
379 return kind == svn.core.svn_node_dir
379 return kind == svn.core.svn_node_dir
380
380
381 def getcfgpath(name, rev):
381 def getcfgpath(name, rev):
382 cfgpath = self.ui.config('convert', 'svn.' + name)
382 cfgpath = self.ui.config('convert', 'svn.' + name)
383 if cfgpath is not None and cfgpath.strip() == '':
383 if cfgpath is not None and cfgpath.strip() == '':
384 return None
384 return None
385 path = (cfgpath or name).strip('/')
385 path = (cfgpath or name).strip('/')
386 if not self.exists(path, rev):
386 if not self.exists(path, rev):
387 if self.module.endswith(path) and name == 'trunk':
387 if self.module.endswith(path) and name == 'trunk':
388 # we are converting from inside this directory
388 # we are converting from inside this directory
389 return None
389 return None
390 if cfgpath:
390 if cfgpath:
391 raise util.Abort(_('expected %s to be at %r, but not found')
391 raise util.Abort(_('expected %s to be at %r, but not found')
392 % (name, path))
392 % (name, path))
393 return None
393 return None
394 self.ui.note(_('found %s at %r\n') % (name, path))
394 self.ui.note(_('found %s at %r\n') % (name, path))
395 return path
395 return path
396
396
397 rev = optrev(self.last_changed)
397 rev = optrev(self.last_changed)
398 oldmodule = ''
398 oldmodule = ''
399 trunk = getcfgpath('trunk', rev)
399 trunk = getcfgpath('trunk', rev)
400 self.tags = getcfgpath('tags', rev)
400 self.tags = getcfgpath('tags', rev)
401 branches = getcfgpath('branches', rev)
401 branches = getcfgpath('branches', rev)
402
402
403 # If the project has a trunk or branches, we will extract heads
403 # If the project has a trunk or branches, we will extract heads
404 # from them. We keep the project root otherwise.
404 # from them. We keep the project root otherwise.
405 if trunk:
405 if trunk:
406 oldmodule = self.module or ''
406 oldmodule = self.module or ''
407 self.module += '/' + trunk
407 self.module += '/' + trunk
408 self.head = self.latest(self.module, self.last_changed)
408 self.head = self.latest(self.module, self.last_changed)
409 if not self.head:
409 if not self.head:
410 raise util.Abort(_('no revision found in module %s')
410 raise util.Abort(_('no revision found in module %s')
411 % self.module)
411 % self.module)
412
412
413 # First head in the list is the module's head
413 # First head in the list is the module's head
414 self.heads = [self.head]
414 self.heads = [self.head]
415 if self.tags is not None:
415 if self.tags is not None:
416 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
416 self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
417
417
418 # Check if branches bring a few more heads to the list
418 # Check if branches bring a few more heads to the list
419 if branches:
419 if branches:
420 rpath = self.url.strip('/')
420 rpath = self.url.strip('/')
421 branchnames = svn.client.ls(rpath + '/' + quote(branches),
421 branchnames = svn.client.ls(rpath + '/' + quote(branches),
422 rev, False, self.ctx)
422 rev, False, self.ctx)
423 for branch in sorted(branchnames):
423 for branch in sorted(branchnames):
424 module = '%s/%s/%s' % (oldmodule, branches, branch)
424 module = '%s/%s/%s' % (oldmodule, branches, branch)
425 if not isdir(module, self.last_changed):
425 if not isdir(module, self.last_changed):
426 continue
426 continue
427 brevid = self.latest(module, self.last_changed)
427 brevid = self.latest(module, self.last_changed)
428 if not brevid:
428 if not brevid:
429 self.ui.note(_('ignoring empty branch %s\n') % branch)
429 self.ui.note(_('ignoring empty branch %s\n') % branch)
430 continue
430 continue
431 self.ui.note(_('found branch %s at %d\n') %
431 self.ui.note(_('found branch %s at %d\n') %
432 (branch, self.revnum(brevid)))
432 (branch, self.revnum(brevid)))
433 self.heads.append(brevid)
433 self.heads.append(brevid)
434
434
435 if self.startrev and self.heads:
435 if self.startrev and self.heads:
436 if len(self.heads) > 1:
436 if len(self.heads) > 1:
437 raise util.Abort(_('svn: start revision is not supported '
437 raise util.Abort(_('svn: start revision is not supported '
438 'with more than one branch'))
438 'with more than one branch'))
439 revnum = self.revnum(self.heads[0])
439 revnum = self.revnum(self.heads[0])
440 if revnum < self.startrev:
440 if revnum < self.startrev:
441 raise util.Abort(
441 raise util.Abort(
442 _('svn: no revision found after start revision %d')
442 _('svn: no revision found after start revision %d')
443 % self.startrev)
443 % self.startrev)
444
444
445 return self.heads
445 return self.heads
446
446
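getheads maps the conventional trunk/branches layout onto a list of head revisions: when a trunk exists it replaces the project root as the main module, and every non-empty child of the branches directory contributes an extra head. The sketch below captures only that shape; listdir and latest are hypothetical stand-ins for the svn.client.ls call and self.latest, and the real method additionally honours the convert.svn.* config paths, tags and startrev.

    def discover_heads(module, listdir, latest, trunk='trunk', branches='branches'):
        # listdir(path) -> iterable of child names or None;
        # latest(path) -> head identifier or None. Both are stand-ins.
        heads = []
        trunkpath = module + '/' + trunk
        mainmodule = trunkpath if latest(trunkpath) else module
        heads.append(latest(mainmodule))
        for name in sorted(listdir(module + '/' + branches) or []):
            head = latest('%s/%s/%s' % (module, branches, name))
            if head:
                heads.append(head)
        return [h for h in heads if h]

    # toy repository: trunk last touched in r10, one live and one empty branch
    revs = {'/proj/trunk': 'r10', '/proj/branches/a': 'r12'}
    layout = {'/proj/branches': ['a', 'empty']}
    assert discover_heads('/proj', layout.get, revs.get) == ['r10', 'r12']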
447 def _getchanges(self, rev):
447 def _getchanges(self, rev, full):
448 (paths, parents) = self.paths[rev]
448 (paths, parents) = self.paths[rev]
449 copies = {}
449 if parents:
450 if parents:
450 files, self.removed, copies = self.expandpaths(rev, paths, parents)
451 files, self.removed, copies = self.expandpaths(rev, paths, parents)
451 else:
452 if full or not parents:
452 # Perform a full checkout on roots
453 # Perform a full checkout on roots
453 uuid, module, revnum = revsplit(rev)
454 uuid, module, revnum = revsplit(rev)
454 entries = svn.client.ls(self.baseurl + quote(module),
455 entries = svn.client.ls(self.baseurl + quote(module),
455 optrev(revnum), True, self.ctx)
456 optrev(revnum), True, self.ctx)
456 files = [n for n, e in entries.iteritems()
457 files = [n for n, e in entries.iteritems()
457 if e.kind == svn.core.svn_node_file]
458 if e.kind == svn.core.svn_node_file]
458 copies = {}
459 self.removed = set()
459 self.removed = set()
460
460
461 files.sort()
461 files.sort()
462 files = zip(files, [rev] * len(files))
462 files = zip(files, [rev] * len(files))
463 return (files, copies)
463 return (files, copies)
464
464
465 def getchanges(self, rev):
465 def getchanges(self, rev, full):
466 # reuse cache from getchangedfiles
466 # reuse cache from getchangedfiles
467 if self._changescache[0] == rev:
467 if self._changescache[0] == rev and not full:
468 (files, copies) = self._changescache[1]
468 (files, copies) = self._changescache[1]
469 else:
469 else:
470 (files, copies) = self._getchanges(rev)
470 (files, copies) = self._getchanges(rev, full)
471 # caller caches the result, so free it here to release memory
471 # caller caches the result, so free it here to release memory
472 del self.paths[rev]
472 del self.paths[rev]
473 return (files, copies)
473 return (files, copies)
474
474
475 def getchangedfiles(self, rev, i):
475 def getchangedfiles(self, rev, i):
476 # called from filemap - cache computed values for reuse in getchanges
476 # called from filemap - cache computed values for reuse in getchanges
477 (files, copies) = self._getchanges(rev)
477 (files, copies) = self._getchanges(rev, False)
478 self._changescache = (rev, (files, copies))
478 self._changescache = (rev, (files, copies))
479 return [f[0] for f in files]
479 return [f[0] for f in files]
480
480
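These two methods cooperate through _changescache: getchangedfiles (called by the filemap driver) caches its computed file list so the getchanges call for the same revision does not hit the repository again, while a full request deliberately bypasses the cache because --full needs every file at that revision, not just the changed ones. A stripped-down sketch of that contract, with a hypothetical compute callback in place of _getchanges:

    class CachedSource(object):
        # Hypothetical miniature of the getchangedfiles/getchanges pairing.
        def __init__(self, compute):
            self._compute = compute        # compute(rev, full) -> (files, copies)
            self._cache = (None, None)

        def getchangedfiles(self, rev):
            files, copies = self._compute(rev, False)
            self._cache = (rev, (files, copies))
            return [f for f, _srcrev in files]

        def getchanges(self, rev, full):
            if self._cache[0] == rev and not full:
                return self._cache[1]
            return self._compute(rev, full)

    calls = []
    def compute(rev, full):
        calls.append((rev, full))
        files = ['a', 'b', 'c'] if full else ['a']
        return [(f, rev) for f in files], {}

    src = CachedSource(compute)
    src.getchangedfiles('r1')
    src.getchanges('r1', full=False)   # served from the cache, no new call
    src.getchanges('r1', full=True)    # --full: recomputed with every file
    assert calls == [('r1', False), ('r1', True)]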
481 def getcommit(self, rev):
481 def getcommit(self, rev):
482 if rev not in self.commits:
482 if rev not in self.commits:
483 uuid, module, revnum = revsplit(rev)
483 uuid, module, revnum = revsplit(rev)
484 self.module = module
484 self.module = module
485 self.reparent(module)
485 self.reparent(module)
486 # We assume that:
486 # We assume that:
487 # - requests for revisions after "stop" come from the
487 # - requests for revisions after "stop" come from the
488 # revision graph backward traversal. Cache all of them
488 # revision graph backward traversal. Cache all of them
489 # down to stop, they will be used eventually.
489 # down to stop, they will be used eventually.
490 # - requests for revisions before "stop" come to get
490 # - requests for revisions before "stop" come to get
491 # isolated branches parents. Just fetch what is needed.
491 # isolated branches parents. Just fetch what is needed.
492 stop = self.lastrevs.get(module, 0)
492 stop = self.lastrevs.get(module, 0)
493 if revnum < stop:
493 if revnum < stop:
494 stop = revnum + 1
494 stop = revnum + 1
495 self._fetch_revisions(revnum, stop)
495 self._fetch_revisions(revnum, stop)
496 if rev not in self.commits:
496 if rev not in self.commits:
497 raise util.Abort(_('svn: revision %s not found') % revnum)
497 raise util.Abort(_('svn: revision %s not found') % revnum)
498 revcommit = self.commits[rev]
498 revcommit = self.commits[rev]
499 # caller caches the result, so free it here to release memory
499 # caller caches the result, so free it here to release memory
500 del self.commits[rev]
500 del self.commits[rev]
501 return revcommit
501 return revcommit
502
502
503 def checkrevformat(self, revstr, mapname='splicemap'):
503 def checkrevformat(self, revstr, mapname='splicemap'):
504 """ fails if revision format does not match the correct format"""
504 """ fails if revision format does not match the correct format"""
505 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
505 if not re.match(r'svn:[0-9a-f]{8,8}-[0-9a-f]{4,4}-'
506 '[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
506 '[0-9a-f]{4,4}-[0-9a-f]{4,4}-[0-9a-f]'
507 '{12,12}(.*)\@[0-9]+$',revstr):
507 '{12,12}(.*)\@[0-9]+$',revstr):
508 raise util.Abort(_('%s entry %s is not a valid revision'
508 raise util.Abort(_('%s entry %s is not a valid revision'
509 ' identifier') % (mapname, revstr))
509 ' identifier') % (mapname, revstr))
510
510
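Splicemap entries for a Subversion source must therefore look like svn:<uuid><module>@<revnum>. A standalone demo of the same pattern, with the character classes written out plainly rather than copied verbatim from the method above:

    import re

    # Same shape as the check above: 'svn:' + UUID + optional path + '@' + revnum.
    REVID_RE = re.compile(r'svn:[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-'
                          r'[0-9a-f]{4}-[0-9a-f]{12}(.*)@[0-9]+$')

    good = 'svn:13f79535-47bb-0310-9956-ffa450edef68/trunk@1052'
    bad = '1052'                      # a bare revision number is rejected
    assert REVID_RE.match(good)
    assert not REVID_RE.match(bad)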
511 def gettags(self):
511 def gettags(self):
512 tags = {}
512 tags = {}
513 if self.tags is None:
513 if self.tags is None:
514 return tags
514 return tags
515
515
516 # svn tags are just a convention, project branches left in a
516 # svn tags are just a convention, project branches left in a
517 # 'tags' directory. There is no other relationship than
517 # 'tags' directory. There is no other relationship than
518 # ancestry, which is expensive to discover and makes them hard
518 # ancestry, which is expensive to discover and makes them hard
519 # to update incrementally. Worse, past revisions may be
519 # to update incrementally. Worse, past revisions may be
520 # referenced by tags far away in the future, requiring a deep
520 # referenced by tags far away in the future, requiring a deep
521 # history traversal on every calculation. Current code
521 # history traversal on every calculation. Current code
522 # performs a single backward traversal, tracking moves within
522 # performs a single backward traversal, tracking moves within
523 # the tags directory (tag renaming) and recording a new tag
523 # the tags directory (tag renaming) and recording a new tag
524 # every time a project is copied from outside the tags
524 # every time a project is copied from outside the tags
525 # directory. It also lists deleted tags; this behaviour may
525 # directory. It also lists deleted tags; this behaviour may
526 # change in the future.
526 # change in the future.
527 pendings = []
527 pendings = []
528 tagspath = self.tags
528 tagspath = self.tags
529 start = svn.ra.get_latest_revnum(self.ra)
529 start = svn.ra.get_latest_revnum(self.ra)
530 stream = self._getlog([self.tags], start, self.startrev)
530 stream = self._getlog([self.tags], start, self.startrev)
531 try:
531 try:
532 for entry in stream:
532 for entry in stream:
533 origpaths, revnum, author, date, message = entry
533 origpaths, revnum, author, date, message = entry
534 if not origpaths:
534 if not origpaths:
535 origpaths = []
535 origpaths = []
536 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
536 copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
537 in origpaths.iteritems() if e.copyfrom_path]
537 in origpaths.iteritems() if e.copyfrom_path]
538 # Apply moves/copies from more specific to general
538 # Apply moves/copies from more specific to general
539 copies.sort(reverse=True)
539 copies.sort(reverse=True)
540
540
541 srctagspath = tagspath
541 srctagspath = tagspath
542 if copies and copies[-1][2] == tagspath:
542 if copies and copies[-1][2] == tagspath:
543 # Track tags directory moves
543 # Track tags directory moves
544 srctagspath = copies.pop()[0]
544 srctagspath = copies.pop()[0]
545
545
546 for source, sourcerev, dest in copies:
546 for source, sourcerev, dest in copies:
547 if not dest.startswith(tagspath + '/'):
547 if not dest.startswith(tagspath + '/'):
548 continue
548 continue
549 for tag in pendings:
549 for tag in pendings:
550 if tag[0].startswith(dest):
550 if tag[0].startswith(dest):
551 tagpath = source + tag[0][len(dest):]
551 tagpath = source + tag[0][len(dest):]
552 tag[:2] = [tagpath, sourcerev]
552 tag[:2] = [tagpath, sourcerev]
553 break
553 break
554 else:
554 else:
555 pendings.append([source, sourcerev, dest])
555 pendings.append([source, sourcerev, dest])
556
556
557 # Filter out tags with children coming from different
557 # Filter out tags with children coming from different
558 # parts of the repository like:
558 # parts of the repository like:
559 # /tags/tag.1 (from /trunk:10)
559 # /tags/tag.1 (from /trunk:10)
560 # /tags/tag.1/foo (from /branches/foo:12)
560 # /tags/tag.1/foo (from /branches/foo:12)
561 # Here /tags/tag.1 is discarded as well as its children.
561 # Here /tags/tag.1 is discarded as well as its children.
562 # It happens with tools like cvs2svn. Such tags cannot
562 # It happens with tools like cvs2svn. Such tags cannot
563 # be represented in mercurial.
563 # be represented in mercurial.
564 addeds = dict((p, e.copyfrom_path) for p, e
564 addeds = dict((p, e.copyfrom_path) for p, e
565 in origpaths.iteritems()
565 in origpaths.iteritems()
566 if e.action == 'A' and e.copyfrom_path)
566 if e.action == 'A' and e.copyfrom_path)
567 badroots = set()
567 badroots = set()
568 for destroot in addeds:
568 for destroot in addeds:
569 for source, sourcerev, dest in pendings:
569 for source, sourcerev, dest in pendings:
570 if (not dest.startswith(destroot + '/')
570 if (not dest.startswith(destroot + '/')
571 or source.startswith(addeds[destroot] + '/')):
571 or source.startswith(addeds[destroot] + '/')):
572 continue
572 continue
573 badroots.add(destroot)
573 badroots.add(destroot)
574 break
574 break
575
575
576 for badroot in badroots:
576 for badroot in badroots:
577 pendings = [p for p in pendings if p[2] != badroot
577 pendings = [p for p in pendings if p[2] != badroot
578 and not p[2].startswith(badroot + '/')]
578 and not p[2].startswith(badroot + '/')]
579
579
580 # Tell tag renamings from tag creations
580 # Tell tag renamings from tag creations
581 renamings = []
581 renamings = []
582 for source, sourcerev, dest in pendings:
582 for source, sourcerev, dest in pendings:
583 tagname = dest.split('/')[-1]
583 tagname = dest.split('/')[-1]
584 if source.startswith(srctagspath):
584 if source.startswith(srctagspath):
585 renamings.append([source, sourcerev, tagname])
585 renamings.append([source, sourcerev, tagname])
586 continue
586 continue
587 if tagname in tags:
587 if tagname in tags:
588 # Keep the latest tag value
588 # Keep the latest tag value
589 continue
589 continue
590 # From revision may be fake, get one with changes
590 # From revision may be fake, get one with changes
591 try:
591 try:
592 tagid = self.latest(source, sourcerev)
592 tagid = self.latest(source, sourcerev)
593 if tagid and tagname not in tags:
593 if tagid and tagname not in tags:
594 tags[tagname] = tagid
594 tags[tagname] = tagid
595 except SvnPathNotFound:
595 except SvnPathNotFound:
596 # It happens when we are following directories
596 # It happens when we are following directories
597 # we assumed were copied with their parents
597 # we assumed were copied with their parents
598 # but were really created in the tag
598 # but were really created in the tag
599 # directory.
599 # directory.
600 pass
600 pass
601 pendings = renamings
601 pendings = renamings
602 tagspath = srctagspath
602 tagspath = srctagspath
603 finally:
603 finally:
604 stream.close()
604 stream.close()
605 return tags
605 return tags
606
606
607 def converted(self, rev, destrev):
607 def converted(self, rev, destrev):
608 if not self.wc:
608 if not self.wc:
609 return
609 return
610 if self.convertfp is None:
610 if self.convertfp is None:
611 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
611 self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
612 'a')
612 'a')
613 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
613 self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
614 self.convertfp.flush()
614 self.convertfp.flush()
615
615
616 def revid(self, revnum, module=None):
616 def revid(self, revnum, module=None):
617 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
617 return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
618
618
619 def revnum(self, rev):
619 def revnum(self, rev):
620 return int(rev.split('@')[-1])
620 return int(rev.split('@')[-1])
621
621
622 def latest(self, path, stop=None):
622 def latest(self, path, stop=None):
623 """Find the latest revid affecting path, up to stop revision
623 """Find the latest revid affecting path, up to stop revision
624 number. If stop is None, default to repository latest
624 number. If stop is None, default to repository latest
625 revision. It may return a revision in a different module,
625 revision. It may return a revision in a different module,
626 since a branch may be moved without a change being
626 since a branch may be moved without a change being
627 reported. Return None if computed module does not belong to
627 reported. Return None if computed module does not belong to
628 rootmodule subtree.
628 rootmodule subtree.
629 """
629 """
630 def findchanges(path, start, stop=None):
630 def findchanges(path, start, stop=None):
631 stream = self._getlog([path], start, stop or 1)
631 stream = self._getlog([path], start, stop or 1)
632 try:
632 try:
633 for entry in stream:
633 for entry in stream:
634 paths, revnum, author, date, message = entry
634 paths, revnum, author, date, message = entry
635 if stop is None and paths:
635 if stop is None and paths:
636 # We do not know the latest changed revision,
636 # We do not know the latest changed revision,
637 # keep the first one with changed paths.
637 # keep the first one with changed paths.
638 break
638 break
639 if revnum <= stop:
639 if revnum <= stop:
640 break
640 break
641
641
642 for p in paths:
642 for p in paths:
643 if (not path.startswith(p) or
643 if (not path.startswith(p) or
644 not paths[p].copyfrom_path):
644 not paths[p].copyfrom_path):
645 continue
645 continue
646 newpath = paths[p].copyfrom_path + path[len(p):]
646 newpath = paths[p].copyfrom_path + path[len(p):]
647 self.ui.debug("branch renamed from %s to %s at %d\n" %
647 self.ui.debug("branch renamed from %s to %s at %d\n" %
648 (path, newpath, revnum))
648 (path, newpath, revnum))
649 path = newpath
649 path = newpath
650 break
650 break
651 if not paths:
651 if not paths:
652 revnum = None
652 revnum = None
653 return revnum, path
653 return revnum, path
654 finally:
654 finally:
655 stream.close()
655 stream.close()
656
656
657 if not path.startswith(self.rootmodule):
657 if not path.startswith(self.rootmodule):
658 # Requests on foreign branches may be forbidden at server level
658 # Requests on foreign branches may be forbidden at server level
659 self.ui.debug('ignoring foreign branch %r\n' % path)
659 self.ui.debug('ignoring foreign branch %r\n' % path)
660 return None
660 return None
661
661
662 if stop is None:
662 if stop is None:
663 stop = svn.ra.get_latest_revnum(self.ra)
663 stop = svn.ra.get_latest_revnum(self.ra)
664 try:
664 try:
665 prevmodule = self.reparent('')
665 prevmodule = self.reparent('')
666 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
666 dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
667 self.reparent(prevmodule)
667 self.reparent(prevmodule)
668 except SubversionException:
668 except SubversionException:
669 dirent = None
669 dirent = None
670 if not dirent:
670 if not dirent:
671 raise SvnPathNotFound(_('%s not found up to revision %d')
671 raise SvnPathNotFound(_('%s not found up to revision %d')
672 % (path, stop))
672 % (path, stop))
673
673
674 # stat() gives us the previous revision on this line of
674 # stat() gives us the previous revision on this line of
675 # development, but it might be in *another module*. Fetch the
675 # development, but it might be in *another module*. Fetch the
676 # log and detect renames down to the latest revision.
676 # log and detect renames down to the latest revision.
677 revnum, realpath = findchanges(path, stop, dirent.created_rev)
677 revnum, realpath = findchanges(path, stop, dirent.created_rev)
678 if revnum is None:
678 if revnum is None:
679 # Tools like svnsync can create empty revisions, when
679 # Tools like svnsync can create empty revisions, when
680 # synchronizing only a subtree for instance. These empty
680 # synchronizing only a subtree for instance. These empty
681 # revisions' created_rev still have their original values
681 # revisions' created_rev still have their original values
682 # despite all changes having disappeared and can be
682 # despite all changes having disappeared and can be
683 # returned by ra.stat(), at least when stating the root
683 # returned by ra.stat(), at least when stating the root
684 # module. In that case, do not trust created_rev and scan
684 # module. In that case, do not trust created_rev and scan
685 # the whole history.
685 # the whole history.
686 revnum, realpath = findchanges(path, stop)
686 revnum, realpath = findchanges(path, stop)
687 if revnum is None:
687 if revnum is None:
688 self.ui.debug('ignoring empty branch %r\n' % realpath)
688 self.ui.debug('ignoring empty branch %r\n' % realpath)
689 return None
689 return None
690
690
691 if not realpath.startswith(self.rootmodule):
691 if not realpath.startswith(self.rootmodule):
692 self.ui.debug('ignoring foreign branch %r\n' % realpath)
692 self.ui.debug('ignoring foreign branch %r\n' % realpath)
693 return None
693 return None
694 return self.revid(revnum, realpath)
694 return self.revid(revnum, realpath)
695
695
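Inside latest, findchanges follows branch renames by re-rooting the tracked path whenever a changed path that prefixes it carries a copyfrom source. A pure-Python sketch of a single rewrite step; changed_paths is a plain dict standing in for the log entry's path records, and follow_rename is an illustrative name:

    def follow_rename(path, changed_paths):
        # changed_paths maps a changed path to its copyfrom source (or None).
        for p, copyfrom in changed_paths.items():
            if path.startswith(p) and copyfrom:
                return copyfrom + path[len(p):]
        return path

    # /branches/new was created by copying /branches/old in this revision,
    # so a path under the new name maps back onto the old name.
    rename = {'/branches/new': '/branches/old'}
    assert follow_rename('/branches/new/src/main.c', rename) == \
        '/branches/old/src/main.c'
    assert follow_rename('/trunk/src/main.c', rename) == '/trunk/src/main.c'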
696 def reparent(self, module):
696 def reparent(self, module):
697 """Reparent the svn transport and return the previous parent."""
697 """Reparent the svn transport and return the previous parent."""
698 if self.prevmodule == module:
698 if self.prevmodule == module:
699 return module
699 return module
700 svnurl = self.baseurl + quote(module)
700 svnurl = self.baseurl + quote(module)
701 prevmodule = self.prevmodule
701 prevmodule = self.prevmodule
702 if prevmodule is None:
702 if prevmodule is None:
703 prevmodule = ''
703 prevmodule = ''
704 self.ui.debug("reparent to %s\n" % svnurl)
704 self.ui.debug("reparent to %s\n" % svnurl)
705 svn.ra.reparent(self.ra, svnurl)
705 svn.ra.reparent(self.ra, svnurl)
706 self.prevmodule = module
706 self.prevmodule = module
707 return prevmodule
707 return prevmodule
708
708
709 def expandpaths(self, rev, paths, parents):
709 def expandpaths(self, rev, paths, parents):
710 changed, removed = set(), set()
710 changed, removed = set(), set()
711 copies = {}
711 copies = {}
712
712
713 new_module, revnum = revsplit(rev)[1:]
713 new_module, revnum = revsplit(rev)[1:]
714 if new_module != self.module:
714 if new_module != self.module:
715 self.module = new_module
715 self.module = new_module
716 self.reparent(self.module)
716 self.reparent(self.module)
717
717
718 for i, (path, ent) in enumerate(paths):
718 for i, (path, ent) in enumerate(paths):
719 self.ui.progress(_('scanning paths'), i, item=path,
719 self.ui.progress(_('scanning paths'), i, item=path,
720 total=len(paths))
720 total=len(paths))
721 entrypath = self.getrelpath(path)
721 entrypath = self.getrelpath(path)
722
722
723 kind = self._checkpath(entrypath, revnum)
723 kind = self._checkpath(entrypath, revnum)
724 if kind == svn.core.svn_node_file:
724 if kind == svn.core.svn_node_file:
725 changed.add(self.recode(entrypath))
725 changed.add(self.recode(entrypath))
726 if not ent.copyfrom_path or not parents:
726 if not ent.copyfrom_path or not parents:
727 continue
727 continue
728 # Copy sources not in parent revisions cannot be
728 # Copy sources not in parent revisions cannot be
729 # represented, ignore their origin for now
729 # represented, ignore their origin for now
730 pmodule, prevnum = revsplit(parents[0])[1:]
730 pmodule, prevnum = revsplit(parents[0])[1:]
731 if ent.copyfrom_rev < prevnum:
731 if ent.copyfrom_rev < prevnum:
732 continue
732 continue
733 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
733 copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
734 if not copyfrom_path:
734 if not copyfrom_path:
735 continue
735 continue
736 self.ui.debug("copied to %s from %s@%s\n" %
736 self.ui.debug("copied to %s from %s@%s\n" %
737 (entrypath, copyfrom_path, ent.copyfrom_rev))
737 (entrypath, copyfrom_path, ent.copyfrom_rev))
738 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
738 copies[self.recode(entrypath)] = self.recode(copyfrom_path)
739 elif kind == 0: # gone, but had better be a deleted *file*
739 elif kind == 0: # gone, but had better be a deleted *file*
740 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
740 self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
741 pmodule, prevnum = revsplit(parents[0])[1:]
741 pmodule, prevnum = revsplit(parents[0])[1:]
742 parentpath = pmodule + "/" + entrypath
742 parentpath = pmodule + "/" + entrypath
743 fromkind = self._checkpath(entrypath, prevnum, pmodule)
743 fromkind = self._checkpath(entrypath, prevnum, pmodule)
744
744
745 if fromkind == svn.core.svn_node_file:
745 if fromkind == svn.core.svn_node_file:
746 removed.add(self.recode(entrypath))
746 removed.add(self.recode(entrypath))
747 elif fromkind == svn.core.svn_node_dir:
747 elif fromkind == svn.core.svn_node_dir:
748 oroot = parentpath.strip('/')
748 oroot = parentpath.strip('/')
749 nroot = path.strip('/')
749 nroot = path.strip('/')
750 children = self._iterfiles(oroot, prevnum)
750 children = self._iterfiles(oroot, prevnum)
751 for childpath in children:
751 for childpath in children:
752 childpath = childpath.replace(oroot, nroot)
752 childpath = childpath.replace(oroot, nroot)
753 childpath = self.getrelpath("/" + childpath, pmodule)
753 childpath = self.getrelpath("/" + childpath, pmodule)
754 if childpath:
754 if childpath:
755 removed.add(self.recode(childpath))
755 removed.add(self.recode(childpath))
756 else:
756 else:
757 self.ui.debug('unknown path in revision %d: %s\n' % \
757 self.ui.debug('unknown path in revision %d: %s\n' % \
758 (revnum, path))
758 (revnum, path))
759 elif kind == svn.core.svn_node_dir:
759 elif kind == svn.core.svn_node_dir:
760 if ent.action == 'M':
760 if ent.action == 'M':
761 # If the directory just had a prop change,
761 # If the directory just had a prop change,
762 # then we shouldn't need to look for its children.
762 # then we shouldn't need to look for its children.
763 continue
763 continue
764 if ent.action == 'R' and parents:
764 if ent.action == 'R' and parents:
765 # If a directory is replacing a file, mark the previous
765 # If a directory is replacing a file, mark the previous
766 # file as deleted
766 # file as deleted
767 pmodule, prevnum = revsplit(parents[0])[1:]
767 pmodule, prevnum = revsplit(parents[0])[1:]
768 pkind = self._checkpath(entrypath, prevnum, pmodule)
768 pkind = self._checkpath(entrypath, prevnum, pmodule)
769 if pkind == svn.core.svn_node_file:
769 if pkind == svn.core.svn_node_file:
770 removed.add(self.recode(entrypath))
770 removed.add(self.recode(entrypath))
771 elif pkind == svn.core.svn_node_dir:
771 elif pkind == svn.core.svn_node_dir:
772 # We do not know what files were kept or removed,
772 # We do not know what files were kept or removed,
773 # mark them all as changed.
773 # mark them all as changed.
774 for childpath in self._iterfiles(pmodule, prevnum):
774 for childpath in self._iterfiles(pmodule, prevnum):
775 childpath = self.getrelpath("/" + childpath)
775 childpath = self.getrelpath("/" + childpath)
776 if childpath:
776 if childpath:
777 changed.add(self.recode(childpath))
777 changed.add(self.recode(childpath))
778
778
779 for childpath in self._iterfiles(path, revnum):
779 for childpath in self._iterfiles(path, revnum):
780 childpath = self.getrelpath("/" + childpath)
780 childpath = self.getrelpath("/" + childpath)
781 if childpath:
781 if childpath:
782 changed.add(self.recode(childpath))
782 changed.add(self.recode(childpath))
783
783
784 # Handle directory copies
784 # Handle directory copies
785 if not ent.copyfrom_path or not parents:
785 if not ent.copyfrom_path or not parents:
786 continue
786 continue
787 # Copy sources not in parent revisions cannot be
787 # Copy sources not in parent revisions cannot be
788 # represented, ignore their origin for now
788 # represented, ignore their origin for now
789 pmodule, prevnum = revsplit(parents[0])[1:]
789 pmodule, prevnum = revsplit(parents[0])[1:]
790 if ent.copyfrom_rev < prevnum:
790 if ent.copyfrom_rev < prevnum:
791 continue
791 continue
792 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
792 copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
793 if not copyfrompath:
793 if not copyfrompath:
794 continue
794 continue
795 self.ui.debug("mark %s came from %s:%d\n"
795 self.ui.debug("mark %s came from %s:%d\n"
796 % (path, copyfrompath, ent.copyfrom_rev))
796 % (path, copyfrompath, ent.copyfrom_rev))
797 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
797 children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
798 for childpath in children:
798 for childpath in children:
799 childpath = self.getrelpath("/" + childpath, pmodule)
799 childpath = self.getrelpath("/" + childpath, pmodule)
800 if not childpath:
800 if not childpath:
801 continue
801 continue
802 copytopath = path + childpath[len(copyfrompath):]
802 copytopath = path + childpath[len(copyfrompath):]
803 copytopath = self.getrelpath(copytopath)
803 copytopath = self.getrelpath(copytopath)
804 copies[self.recode(copytopath)] = self.recode(childpath)
804 copies[self.recode(copytopath)] = self.recode(childpath)
805
805
806 self.ui.progress(_('scanning paths'), None)
806 self.ui.progress(_('scanning paths'), None)
807 changed.update(removed)
807 changed.update(removed)
808 return (list(changed), removed, copies)
808 return (list(changed), removed, copies)
809
809
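When expandpaths sees a copied directory, it maps every file below the copy source onto the corresponding path below the destination (copytopath = path + childpath[len(copyfrompath):]). A small sketch of that expansion over a toy file list; expand_dir_copy and its arguments are illustrative names only:

    def expand_dir_copy(dest, copyfrom, children):
        # children: files that existed under the copy source. Returns the
        # per-file copy map the sink will see (destination -> source).
        copies = {}
        for child in children:
            if child.startswith(copyfrom + '/') or child == copyfrom:
                copies[dest + child[len(copyfrom):]] = child
        return copies

    copies = expand_dir_copy('branches/1.x', 'trunk',
                             ['trunk/a.c', 'trunk/sub/b.c'])
    assert copies == {'branches/1.x/a.c': 'trunk/a.c',
                      'branches/1.x/sub/b.c': 'trunk/sub/b.c'}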
810 def _fetch_revisions(self, from_revnum, to_revnum):
810 def _fetch_revisions(self, from_revnum, to_revnum):
811 if from_revnum < to_revnum:
811 if from_revnum < to_revnum:
812 from_revnum, to_revnum = to_revnum, from_revnum
812 from_revnum, to_revnum = to_revnum, from_revnum
813
813
814 self.child_cset = None
814 self.child_cset = None
815
815
816 def parselogentry(orig_paths, revnum, author, date, message):
816 def parselogentry(orig_paths, revnum, author, date, message):
817 """Return the parsed commit object or None, and True if
817 """Return the parsed commit object or None, and True if
818 the revision is a branch root.
818 the revision is a branch root.
819 """
819 """
820 self.ui.debug("parsing revision %d (%d changes)\n" %
820 self.ui.debug("parsing revision %d (%d changes)\n" %
821 (revnum, len(orig_paths)))
821 (revnum, len(orig_paths)))
822
822
823 branched = False
823 branched = False
824 rev = self.revid(revnum)
824 rev = self.revid(revnum)
825 # branch log might return entries for a parent we already have
825 # branch log might return entries for a parent we already have
826
826
827 if rev in self.commits or revnum < to_revnum:
827 if rev in self.commits or revnum < to_revnum:
828 return None, branched
828 return None, branched
829
829
830 parents = []
830 parents = []
831 # check whether this revision is the start of a branch or part
831 # check whether this revision is the start of a branch or part
832 # of a branch renaming
832 # of a branch renaming
833 orig_paths = sorted(orig_paths.iteritems())
833 orig_paths = sorted(orig_paths.iteritems())
834 root_paths = [(p, e) for p, e in orig_paths
834 root_paths = [(p, e) for p, e in orig_paths
835 if self.module.startswith(p)]
835 if self.module.startswith(p)]
836 if root_paths:
836 if root_paths:
837 path, ent = root_paths[-1]
837 path, ent = root_paths[-1]
838 if ent.copyfrom_path:
838 if ent.copyfrom_path:
839 branched = True
839 branched = True
840 newpath = ent.copyfrom_path + self.module[len(path):]
840 newpath = ent.copyfrom_path + self.module[len(path):]
841 # ent.copyfrom_rev may not be the actual last revision
841 # ent.copyfrom_rev may not be the actual last revision
842 previd = self.latest(newpath, ent.copyfrom_rev)
842 previd = self.latest(newpath, ent.copyfrom_rev)
843 if previd is not None:
843 if previd is not None:
844 prevmodule, prevnum = revsplit(previd)[1:]
844 prevmodule, prevnum = revsplit(previd)[1:]
845 if prevnum >= self.startrev:
845 if prevnum >= self.startrev:
846 parents = [previd]
846 parents = [previd]
847 self.ui.note(
847 self.ui.note(
848 _('found parent of branch %s at %d: %s\n') %
848 _('found parent of branch %s at %d: %s\n') %
849 (self.module, prevnum, prevmodule))
849 (self.module, prevnum, prevmodule))
850 else:
850 else:
851 self.ui.debug("no copyfrom path, don't know what to do.\n")
851 self.ui.debug("no copyfrom path, don't know what to do.\n")
852
852
853 paths = []
853 paths = []
854 # filter out unrelated paths
854 # filter out unrelated paths
855 for path, ent in orig_paths:
855 for path, ent in orig_paths:
856 if self.getrelpath(path) is None:
856 if self.getrelpath(path) is None:
857 continue
857 continue
858 paths.append((path, ent))
858 paths.append((path, ent))
859
859
860 # Example SVN datetime. Includes microseconds.
860 # Example SVN datetime. Includes microseconds.
861 # ISO-8601 conformant
861 # ISO-8601 conformant
862 # '2007-01-04T17:35:00.902377Z'
862 # '2007-01-04T17:35:00.902377Z'
863 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
863 date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
864 if self.ui.configbool('convert', 'localtimezone'):
864 if self.ui.configbool('convert', 'localtimezone'):
865 date = makedatetimestamp(date[0])
865 date = makedatetimestamp(date[0])
866
866
867 log = message and self.recode(message) or ''
867 log = message and self.recode(message) or ''
868 author = author and self.recode(author) or ''
868 author = author and self.recode(author) or ''
869 try:
869 try:
870 branch = self.module.split("/")[-1]
870 branch = self.module.split("/")[-1]
871 if branch == self.trunkname:
871 if branch == self.trunkname:
872 branch = None
872 branch = None
873 except IndexError:
873 except IndexError:
874 branch = None
874 branch = None
875
875
876 cset = commit(author=author,
876 cset = commit(author=author,
877 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
877 date=util.datestr(date, '%Y-%m-%d %H:%M:%S %1%2'),
878 desc=log,
878 desc=log,
879 parents=parents,
879 parents=parents,
880 branch=branch,
880 branch=branch,
881 rev=rev)
881 rev=rev)
882
882
883 self.commits[rev] = cset
883 self.commits[rev] = cset
884 # The parents list is *shared* among self.paths and the
884 # The parents list is *shared* among self.paths and the
885 # commit object. Both will be updated below.
885 # commit object. Both will be updated below.
886 self.paths[rev] = (paths, cset.parents)
886 self.paths[rev] = (paths, cset.parents)
887 if self.child_cset and not self.child_cset.parents:
887 if self.child_cset and not self.child_cset.parents:
888 self.child_cset.parents[:] = [rev]
888 self.child_cset.parents[:] = [rev]
889 self.child_cset = cset
889 self.child_cset = cset
890 return cset, branched
890 return cset, branched
891
891
892 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
892 self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
893 (self.module, from_revnum, to_revnum))
893 (self.module, from_revnum, to_revnum))
894
894
895 try:
895 try:
896 firstcset = None
896 firstcset = None
897 lastonbranch = False
897 lastonbranch = False
898 stream = self._getlog([self.module], from_revnum, to_revnum)
898 stream = self._getlog([self.module], from_revnum, to_revnum)
899 try:
899 try:
900 for entry in stream:
900 for entry in stream:
901 paths, revnum, author, date, message = entry
901 paths, revnum, author, date, message = entry
902 if revnum < self.startrev:
902 if revnum < self.startrev:
903 lastonbranch = True
903 lastonbranch = True
904 break
904 break
905 if not paths:
905 if not paths:
906 self.ui.debug('revision %d has no entries\n' % revnum)
906 self.ui.debug('revision %d has no entries\n' % revnum)
907 # If we ever leave the loop on an empty
907 # If we ever leave the loop on an empty
908 # revision, do not try to get a parent branch
908 # revision, do not try to get a parent branch
909 lastonbranch = lastonbranch or revnum == 0
909 lastonbranch = lastonbranch or revnum == 0
910 continue
910 continue
911 cset, lastonbranch = parselogentry(paths, revnum, author,
911 cset, lastonbranch = parselogentry(paths, revnum, author,
912 date, message)
912 date, message)
913 if cset:
913 if cset:
914 firstcset = cset
914 firstcset = cset
915 if lastonbranch:
915 if lastonbranch:
916 break
916 break
917 finally:
917 finally:
918 stream.close()
918 stream.close()
919
919
920 if not lastonbranch and firstcset and not firstcset.parents:
920 if not lastonbranch and firstcset and not firstcset.parents:
921 # The first revision of the sequence (the last fetched one)
921 # The first revision of the sequence (the last fetched one)
922 # has invalid parents if not a branch root. Find the parent
922 # has invalid parents if not a branch root. Find the parent
923 # revision now, if any.
923 # revision now, if any.
924 try:
924 try:
925 firstrevnum = self.revnum(firstcset.rev)
925 firstrevnum = self.revnum(firstcset.rev)
926 if firstrevnum > 1:
926 if firstrevnum > 1:
927 latest = self.latest(self.module, firstrevnum - 1)
927 latest = self.latest(self.module, firstrevnum - 1)
928 if latest:
928 if latest:
929 firstcset.parents.append(latest)
929 firstcset.parents.append(latest)
930 except SvnPathNotFound:
930 except SvnPathNotFound:
931 pass
931 pass
932 except SubversionException, (inst, num):
932 except SubversionException, (inst, num):
933 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
933 if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
934 raise util.Abort(_('svn: branch has no revision %s')
934 raise util.Abort(_('svn: branch has no revision %s')
935 % to_revnum)
935 % to_revnum)
936 raise
936 raise
937
937
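parselogentry decides whether a revision starts (or renames) the branch by looking at the deepest changed path that is a prefix of the current module: if that entry carries a copyfrom source, the parent lives at copyfrom_path plus the remainder of the module path. A rough sketch of just that decision, with branch_parent as a hypothetical helper:

    def branch_parent(module, changed):
        # changed: list of (path, copyfrom_path or None), sorted by path.
        # Returns the module the parent revision lives under, or None when
        # this revision does not start or rename the branch.
        roots = [(p, src) for p, src in changed if module.startswith(p)]
        if not roots:
            return None
        path, src = roots[-1]          # deepest matching prefix wins
        if not src:
            return None
        return src + module[len(path):]

    changed = sorted([('/branches', None), ('/branches/1.x', '/trunk')])
    assert branch_parent('/branches/1.x', changed) == '/trunk'
    assert branch_parent('/trunk', [('/trunk/file.c', None)]) is None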
938 def getfile(self, file, rev):
938 def getfile(self, file, rev):
939 # TODO: ra.get_file transmits the whole file instead of diffs.
939 # TODO: ra.get_file transmits the whole file instead of diffs.
940 if file in self.removed:
940 if file in self.removed:
941 return None, None
941 return None, None
942 mode = ''
942 mode = ''
943 try:
943 try:
944 new_module, revnum = revsplit(rev)[1:]
944 new_module, revnum = revsplit(rev)[1:]
945 if self.module != new_module:
945 if self.module != new_module:
946 self.module = new_module
946 self.module = new_module
947 self.reparent(self.module)
947 self.reparent(self.module)
948 io = StringIO()
948 io = StringIO()
949 info = svn.ra.get_file(self.ra, file, revnum, io)
949 info = svn.ra.get_file(self.ra, file, revnum, io)
950 data = io.getvalue()
950 data = io.getvalue()
951 # ra.get_file() seems to keep a reference on the input buffer
951 # ra.get_file() seems to keep a reference on the input buffer
952 # preventing collection. Release it explicitly.
952 # preventing collection. Release it explicitly.
953 io.close()
953 io.close()
954 if isinstance(info, list):
954 if isinstance(info, list):
955 info = info[-1]
955 info = info[-1]
956 mode = ("svn:executable" in info) and 'x' or ''
956 mode = ("svn:executable" in info) and 'x' or ''
957 mode = ("svn:special" in info) and 'l' or mode
957 mode = ("svn:special" in info) and 'l' or mode
958 except SubversionException, e:
958 except SubversionException, e:
959 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
959 notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
960 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
960 svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
961 if e.apr_err in notfound: # File not found
961 if e.apr_err in notfound: # File not found
962 return None, None
962 return None, None
963 raise
963 raise
964 if mode == 'l':
964 if mode == 'l':
965 link_prefix = "link "
965 link_prefix = "link "
966 if data.startswith(link_prefix):
966 if data.startswith(link_prefix):
967 data = data[len(link_prefix):]
967 data = data[len(link_prefix):]
968 return data, mode
968 return data, mode
969
969
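getfile translates Subversion file properties into Mercurial's one-letter flags ('x' for svn:executable, 'l' for svn:special) and strips the 'link ' payload prefix from symlinks. A self-contained sketch of that translation; props is a plain dict standing in for the property list the bindings return, and to_hg_file is an illustrative name:

    def to_hg_file(data, props):
        mode = 'x' if 'svn:executable' in props else ''
        mode = 'l' if 'svn:special' in props else mode
        if mode == 'l' and data.startswith('link '):
            data = data[len('link '):]
        return data, mode

    assert to_hg_file('#!/bin/sh\n', {'svn:executable': '*'}) == ('#!/bin/sh\n', 'x')
    assert to_hg_file('link target/path', {'svn:special': '*'}) == ('target/path', 'l')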
970 def _iterfiles(self, path, revnum):
970 def _iterfiles(self, path, revnum):
971 """Enumerate all files in path at revnum, recursively."""
971 """Enumerate all files in path at revnum, recursively."""
972 path = path.strip('/')
972 path = path.strip('/')
973 pool = Pool()
973 pool = Pool()
974 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
974 rpath = '/'.join([self.baseurl, quote(path)]).strip('/')
975 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
975 entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
976 if path:
976 if path:
977 path += '/'
977 path += '/'
978 return ((path + p) for p, e in entries.iteritems()
978 return ((path + p) for p, e in entries.iteritems()
979 if e.kind == svn.core.svn_node_file)
979 if e.kind == svn.core.svn_node_file)
980
980
981 def getrelpath(self, path, module=None):
981 def getrelpath(self, path, module=None):
982 if module is None:
982 if module is None:
983 module = self.module
983 module = self.module
984 # Given the repository url of this wc, say
984 # Given the repository url of this wc, say
985 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
985 # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
986 # extract the "entry" portion (a relative path) from what
986 # extract the "entry" portion (a relative path) from what
987 # svn log --xml says, i.e.
987 # svn log --xml says, i.e.
988 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
988 # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
989 # that is to say "tests/PloneTestCase.py"
989 # that is to say "tests/PloneTestCase.py"
990 if path.startswith(module):
990 if path.startswith(module):
991 relative = path.rstrip('/')[len(module):]
991 relative = path.rstrip('/')[len(module):]
992 if relative.startswith('/'):
992 if relative.startswith('/'):
993 return relative[1:]
993 return relative[1:]
994 elif relative == '':
994 elif relative == '':
995 return relative
995 return relative
996
996
997 # The path is outside our tracked tree...
997 # The path is outside our tracked tree...
998 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
998 self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
999 return None
999 return None
1000
1000
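A tiny, self-contained version of the prefix stripping getrelpath performs, reusing the Plone path from the comment above as the worked example (relpath here is an illustrative name, not the real method):

    def relpath(path, module):
        # Strip the module prefix; return None for paths outside the tree.
        if not path.startswith(module):
            return None
        rel = path.rstrip('/')[len(module):]
        return rel[1:] if rel.startswith('/') else (rel if rel == '' else None)

    module = '/CMFPlone/branches/Plone-2_0-branch'
    assert relpath(module + '/tests/PloneTestCase.py', module) == \
        'tests/PloneTestCase.py'
    assert relpath('/other/path.py', module) is None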
1001 def _checkpath(self, path, revnum, module=None):
1001 def _checkpath(self, path, revnum, module=None):
1002 if module is not None:
1002 if module is not None:
1003 prevmodule = self.reparent('')
1003 prevmodule = self.reparent('')
1004 path = module + '/' + path
1004 path = module + '/' + path
1005 try:
1005 try:
1006 # ra.check_path does not like leading slashes very much; it leads
1006 # ra.check_path does not like leading slashes very much; it leads
1007 # to PROPFIND subversion errors
1007 # to PROPFIND subversion errors
1008 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1008 return svn.ra.check_path(self.ra, path.strip('/'), revnum)
1009 finally:
1009 finally:
1010 if module is not None:
1010 if module is not None:
1011 self.reparent(prevmodule)
1011 self.reparent(prevmodule)
1012
1012
1013 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1013 def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
1014 strict_node_history=False):
1014 strict_node_history=False):
1015 # Normalize path names, svn >= 1.5 only wants paths relative to
1015 # Normalize path names, svn >= 1.5 only wants paths relative to
1016 # supplied URL
1016 # supplied URL
1017 relpaths = []
1017 relpaths = []
1018 for p in paths:
1018 for p in paths:
1019 if not p.startswith('/'):
1019 if not p.startswith('/'):
1020 p = self.module + '/' + p
1020 p = self.module + '/' + p
1021 relpaths.append(p.strip('/'))
1021 relpaths.append(p.strip('/'))
1022 args = [self.baseurl, relpaths, start, end, limit,
1022 args = [self.baseurl, relpaths, start, end, limit,
1023 discover_changed_paths, strict_node_history]
1023 discover_changed_paths, strict_node_history]
1024 # undocumented feature: debugsvnlog can be disabled
1024 # undocumented feature: debugsvnlog can be disabled
1025 if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
1025 if not self.ui.configbool('convert', 'svn.debugsvnlog', True):
1026 return directlogstream(*args)
1026 return directlogstream(*args)
1027 arg = encodeargs(args)
1027 arg = encodeargs(args)
1028 hgexe = util.hgexecutable()
1028 hgexe = util.hgexecutable()
1029 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1029 cmd = '%s debugsvnlog' % util.shellquote(hgexe)
1030 stdin, stdout = util.popen2(util.quotecommand(cmd))
1030 stdin, stdout = util.popen2(util.quotecommand(cmd))
1031 stdin.write(arg)
1031 stdin.write(arg)
1032 try:
1032 try:
1033 stdin.close()
1033 stdin.close()
1034 except IOError:
1034 except IOError:
1035 raise util.Abort(_('Mercurial failed to run itself, check'
1035 raise util.Abort(_('Mercurial failed to run itself, check'
1036 ' hg executable is in PATH'))
1036 ' hg executable is in PATH'))
1037 return logstream(stdout)
1037 return logstream(stdout)
1038
1038
1039 pre_revprop_change = '''#!/bin/sh
1039 pre_revprop_change = '''#!/bin/sh
1040
1040
1041 REPOS="$1"
1041 REPOS="$1"
1042 REV="$2"
1042 REV="$2"
1043 USER="$3"
1043 USER="$3"
1044 PROPNAME="$4"
1044 PROPNAME="$4"
1045 ACTION="$5"
1045 ACTION="$5"
1046
1046
1047 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1047 if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
1048 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1048 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
1049 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1049 if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
1050
1050
1051 echo "Changing prohibited revision property" >&2
1051 echo "Changing prohibited revision property" >&2
1052 exit 1
1052 exit 1
1053 '''
1053 '''
1054
1054
1055 class svn_sink(converter_sink, commandline):
1055 class svn_sink(converter_sink, commandline):
1056 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1056 commit_re = re.compile(r'Committed revision (\d+).', re.M)
1057 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1057 uuid_re = re.compile(r'Repository UUID:\s*(\S+)', re.M)
1058
1058
1059 def prerun(self):
1059 def prerun(self):
1060 if self.wc:
1060 if self.wc:
1061 os.chdir(self.wc)
1061 os.chdir(self.wc)
1062
1062
1063 def postrun(self):
1063 def postrun(self):
1064 if self.wc:
1064 if self.wc:
1065 os.chdir(self.cwd)
1065 os.chdir(self.cwd)
1066
1066
1067 def join(self, name):
1067 def join(self, name):
1068 return os.path.join(self.wc, '.svn', name)
1068 return os.path.join(self.wc, '.svn', name)
1069
1069
1070 def revmapfile(self):
1070 def revmapfile(self):
1071 return self.join('hg-shamap')
1071 return self.join('hg-shamap')
1072
1072
1073 def authorfile(self):
1073 def authorfile(self):
1074 return self.join('hg-authormap')
1074 return self.join('hg-authormap')
1075
1075
1076 def __init__(self, ui, path):
1076 def __init__(self, ui, path):
1077
1077
1078 converter_sink.__init__(self, ui, path)
1078 converter_sink.__init__(self, ui, path)
1079 commandline.__init__(self, ui, 'svn')
1079 commandline.__init__(self, ui, 'svn')
1080 self.delete = []
1080 self.delete = []
1081 self.setexec = []
1081 self.setexec = []
1082 self.delexec = []
1082 self.delexec = []
1083 self.copies = []
1083 self.copies = []
1084 self.wc = None
1084 self.wc = None
1085 self.cwd = os.getcwd()
1085 self.cwd = os.getcwd()
1086
1086
1087 created = False
1087 created = False
1088 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1088 if os.path.isfile(os.path.join(path, '.svn', 'entries')):
1089 self.wc = os.path.realpath(path)
1089 self.wc = os.path.realpath(path)
1090 self.run0('update')
1090 self.run0('update')
1091 else:
1091 else:
1092 if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
1092 if not re.search(r'^(file|http|https|svn|svn\+ssh)\://', path):
1093 path = os.path.realpath(path)
1093 path = os.path.realpath(path)
1094 if os.path.isdir(os.path.dirname(path)):
1094 if os.path.isdir(os.path.dirname(path)):
1095 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1095 if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
1096 ui.status(_('initializing svn repository %r\n') %
1096 ui.status(_('initializing svn repository %r\n') %
1097 os.path.basename(path))
1097 os.path.basename(path))
1098 commandline(ui, 'svnadmin').run0('create', path)
1098 commandline(ui, 'svnadmin').run0('create', path)
1099 created = path
1099 created = path
1100 path = util.normpath(path)
1100 path = util.normpath(path)
1101 if not path.startswith('/'):
1101 if not path.startswith('/'):
1102 path = '/' + path
1102 path = '/' + path
1103 path = 'file://' + path
1103 path = 'file://' + path
1104
1104
1105 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1105 wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
1106 ui.status(_('initializing svn working copy %r\n')
1106 ui.status(_('initializing svn working copy %r\n')
1107 % os.path.basename(wcpath))
1107 % os.path.basename(wcpath))
1108 self.run0('checkout', path, wcpath)
1108 self.run0('checkout', path, wcpath)
1109
1109
1110 self.wc = wcpath
1110 self.wc = wcpath
1111 self.opener = scmutil.opener(self.wc)
1111 self.opener = scmutil.opener(self.wc)
1112 self.wopener = scmutil.opener(self.wc)
1112 self.wopener = scmutil.opener(self.wc)
1113 self.childmap = mapfile(ui, self.join('hg-childmap'))
1113 self.childmap = mapfile(ui, self.join('hg-childmap'))
1114 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1114 self.is_exec = util.checkexec(self.wc) and util.isexec or None
1115
1115
1116 if created:
1116 if created:
1117 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1117 hook = os.path.join(created, 'hooks', 'pre-revprop-change')
1118 fp = open(hook, 'w')
1118 fp = open(hook, 'w')
1119 fp.write(pre_revprop_change)
1119 fp.write(pre_revprop_change)
1120 fp.close()
1120 fp.close()
1121 util.setflags(hook, False, True)
1121 util.setflags(hook, False, True)
1122
1122
1123 output = self.run0('info')
1123 output = self.run0('info')
1124 self.uuid = self.uuid_re.search(output).group(1).strip()
1124 self.uuid = self.uuid_re.search(output).group(1).strip()
1125
1125
1126 def wjoin(self, *names):
1126 def wjoin(self, *names):
1127 return os.path.join(self.wc, *names)
1127 return os.path.join(self.wc, *names)
1128
1128
1129 @propertycache
1129 @propertycache
1130 def manifest(self):
1130 def manifest(self):
1131 # As of svn 1.7, the "add" command fails when receiving
1131 # As of svn 1.7, the "add" command fails when receiving
1132 # already tracked entries, so we have to track and filter them
1132 # already tracked entries, so we have to track and filter them
1133 # ourselves.
1133 # ourselves.
1134 m = set()
1134 m = set()
1135 output = self.run0('ls', recursive=True, xml=True)
1135 output = self.run0('ls', recursive=True, xml=True)
1136 doc = xml.dom.minidom.parseString(output)
1136 doc = xml.dom.minidom.parseString(output)
1137 for e in doc.getElementsByTagName('entry'):
1137 for e in doc.getElementsByTagName('entry'):
1138 for n in e.childNodes:
1138 for n in e.childNodes:
1139 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1139 if n.nodeType != n.ELEMENT_NODE or n.tagName != 'name':
1140 continue
1140 continue
1141 name = ''.join(c.data for c in n.childNodes
1141 name = ''.join(c.data for c in n.childNodes
1142 if c.nodeType == c.TEXT_NODE)
1142 if c.nodeType == c.TEXT_NODE)
1143 # Entries are compared with names coming from
1143 # Entries are compared with names coming from
1144 # mercurial, so bytes with undefined encoding. Our
1144 # mercurial, so bytes with undefined encoding. Our
1145 # best bet is to assume they are in local
1145 # best bet is to assume they are in local
1146 # encoding. They will be passed to command line calls
1146 # encoding. They will be passed to command line calls
1147 # later anyway, so they better be.
1147 # later anyway, so they better be.
1148 m.add(encoding.tolocal(name.encode('utf-8')))
1148 m.add(encoding.tolocal(name.encode('utf-8')))
1149 break
1149 break
1150 return m
1150 return m
1151
1151
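# --- Editor's sketch (not part of subversion.py) ------------------------
# Why the sink keeps its own manifest set: newer svn clients refuse
# "svn add" on paths that are already versioned, so candidates are checked
# against the tracked set first -- the same filtering add_files()/add_dirs()
# below perform against self.manifest.  Illustrative names only.
def filter_untracked(candidates, manifest):
    """Return only the paths that still need an 'svn add'."""
    return [f for f in candidates if f not in manifest]

# e.g. filter_untracked(['a', 'd1/new.txt'], {'a', 'd1'}) == ['d1/new.txt']
# ------------------------------------------------------------------------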
1152 def putfile(self, filename, flags, data):
1152 def putfile(self, filename, flags, data):
1153 if 'l' in flags:
1153 if 'l' in flags:
1154 self.wopener.symlink(data, filename)
1154 self.wopener.symlink(data, filename)
1155 else:
1155 else:
1156 try:
1156 try:
1157 if os.path.islink(self.wjoin(filename)):
1157 if os.path.islink(self.wjoin(filename)):
1158 os.unlink(filename)
1158 os.unlink(filename)
1159 except OSError:
1159 except OSError:
1160 pass
1160 pass
1161 self.wopener.write(filename, data)
1161 self.wopener.write(filename, data)
1162
1162
1163 if self.is_exec:
1163 if self.is_exec:
1164 if self.is_exec(self.wjoin(filename)):
1164 if self.is_exec(self.wjoin(filename)):
1165 if 'x' not in flags:
1165 if 'x' not in flags:
1166 self.delexec.append(filename)
1166 self.delexec.append(filename)
1167 else:
1167 else:
1168 if 'x' in flags:
1168 if 'x' in flags:
1169 self.setexec.append(filename)
1169 self.setexec.append(filename)
1170 util.setflags(self.wjoin(filename), False, 'x' in flags)
1170 util.setflags(self.wjoin(filename), False, 'x' in flags)
1171
1171
1172 def _copyfile(self, source, dest):
1172 def _copyfile(self, source, dest):
1173 # SVN's copy command pukes if the destination file exists, but
1173 # SVN's copy command pukes if the destination file exists, but
1174 # our copyfile method expects to record a copy that has
1174 # our copyfile method expects to record a copy that has
1175 # already occurred. Cross the semantic gap.
1175 # already occurred. Cross the semantic gap.
1176 wdest = self.wjoin(dest)
1176 wdest = self.wjoin(dest)
1177 exists = os.path.lexists(wdest)
1177 exists = os.path.lexists(wdest)
1178 if exists:
1178 if exists:
1179 fd, tempname = tempfile.mkstemp(
1179 fd, tempname = tempfile.mkstemp(
1180 prefix='hg-copy-', dir=os.path.dirname(wdest))
1180 prefix='hg-copy-', dir=os.path.dirname(wdest))
1181 os.close(fd)
1181 os.close(fd)
1182 os.unlink(tempname)
1182 os.unlink(tempname)
1183 os.rename(wdest, tempname)
1183 os.rename(wdest, tempname)
1184 try:
1184 try:
1185 self.run0('copy', source, dest)
1185 self.run0('copy', source, dest)
1186 finally:
1186 finally:
1187 self.manifest.add(dest)
1187 self.manifest.add(dest)
1188 if exists:
1188 if exists:
1189 try:
1189 try:
1190 os.unlink(wdest)
1190 os.unlink(wdest)
1191 except OSError:
1191 except OSError:
1192 pass
1192 pass
1193 os.rename(tempname, wdest)
1193 os.rename(tempname, wdest)
1194
1194
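# --- Editor's sketch (not part of subversion.py) ------------------------
# The dance _copyfile() above performs, shown standalone: "svn copy" aborts
# when the destination already exists, so an existing destination is renamed
# aside, the copy is recorded, and the original content is then restored on
# top of the copy.  run_copy is a hypothetical callable standing in for the
# actual "svn copy" invocation.
import os
import tempfile

def copy_over_existing(run_copy, source, dest):
    existed = os.path.lexists(dest)
    if existed:
        fd, aside = tempfile.mkstemp(prefix='hg-copy-',
                                     dir=os.path.dirname(dest) or '.')
        os.close(fd)
        os.unlink(aside)
        os.rename(dest, aside)
    try:
        run_copy(source, dest)
    finally:
        if existed:
            if os.path.lexists(dest):
                os.unlink(dest)
            os.rename(aside, dest)
# ------------------------------------------------------------------------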
1195 def dirs_of(self, files):
1195 def dirs_of(self, files):
1196 dirs = set()
1196 dirs = set()
1197 for f in files:
1197 for f in files:
1198 if os.path.isdir(self.wjoin(f)):
1198 if os.path.isdir(self.wjoin(f)):
1199 dirs.add(f)
1199 dirs.add(f)
1200 for i in strutil.rfindall(f, '/'):
1200 for i in strutil.rfindall(f, '/'):
1201 dirs.add(f[:i])
1201 dirs.add(f[:i])
1202 return dirs
1202 return dirs
1203
1203
1204 def add_dirs(self, files):
1204 def add_dirs(self, files):
1205 add_dirs = [d for d in sorted(self.dirs_of(files))
1205 add_dirs = [d for d in sorted(self.dirs_of(files))
1206 if d not in self.manifest]
1206 if d not in self.manifest]
1207 if add_dirs:
1207 if add_dirs:
1208 self.manifest.update(add_dirs)
1208 self.manifest.update(add_dirs)
1209 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1209 self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
1210 return add_dirs
1210 return add_dirs
1211
1211
1212 def add_files(self, files):
1212 def add_files(self, files):
1213 files = [f for f in files if f not in self.manifest]
1213 files = [f for f in files if f not in self.manifest]
1214 if files:
1214 if files:
1215 self.manifest.update(files)
1215 self.manifest.update(files)
1216 self.xargs(files, 'add', quiet=True)
1216 self.xargs(files, 'add', quiet=True)
1217 return files
1217 return files
1218
1218
1219 def addchild(self, parent, child):
1219 def addchild(self, parent, child):
1220 self.childmap[parent] = child
1220 self.childmap[parent] = child
1221
1221
1222 def revid(self, rev):
1222 def revid(self, rev):
1223 return u"svn:%s@%s" % (self.uuid, rev)
1223 return u"svn:%s@%s" % (self.uuid, rev)
1224
1224
1225 def putcommit(self, files, copies, parents, commit, source, revmap):
1225 def putcommit(self, files, copies, parents, commit, source, revmap, full):
1226 for parent in parents:
1226 for parent in parents:
1227 try:
1227 try:
1228 return self.revid(self.childmap[parent])
1228 return self.revid(self.childmap[parent])
1229 except KeyError:
1229 except KeyError:
1230 pass
1230 pass
1231
1231
1232 # Apply changes to working copy
1232 # Apply changes to working copy
1233 for f, v in files:
1233 for f, v in files:
1234 data, mode = source.getfile(f, v)
1234 data, mode = source.getfile(f, v)
1235 if data is None:
1235 if data is None:
1236 self.delete.append(f)
1236 self.delete.append(f)
1237 else:
1237 else:
1238 self.putfile(f, mode, data)
1238 self.putfile(f, mode, data)
1239 if f in copies:
1239 if f in copies:
1240 self.copies.append([copies[f], f])
1240 self.copies.append([copies[f], f])
1241 if full:
1242 self.delete.extend(sorted(self.manifest.difference(files)))
1241 files = [f[0] for f in files]
1243 files = [f[0] for f in files]
1242
1244
1243 entries = set(self.delete)
1245 entries = set(self.delete)
1244 files = frozenset(files)
1246 files = frozenset(files)
1245 entries.update(self.add_dirs(files.difference(entries)))
1247 entries.update(self.add_dirs(files.difference(entries)))
1246 if self.copies:
1248 if self.copies:
1247 for s, d in self.copies:
1249 for s, d in self.copies:
1248 self._copyfile(s, d)
1250 self._copyfile(s, d)
1249 self.copies = []
1251 self.copies = []
1250 if self.delete:
1252 if self.delete:
1251 self.xargs(self.delete, 'delete')
1253 self.xargs(self.delete, 'delete')
1252 for f in self.delete:
1254 for f in self.delete:
1253 self.manifest.remove(f)
1255 self.manifest.remove(f)
1254 self.delete = []
1256 self.delete = []
1255 entries.update(self.add_files(files.difference(entries)))
1257 entries.update(self.add_files(files.difference(entries)))
1256 if self.delexec:
1258 if self.delexec:
1257 self.xargs(self.delexec, 'propdel', 'svn:executable')
1259 self.xargs(self.delexec, 'propdel', 'svn:executable')
1258 self.delexec = []
1260 self.delexec = []
1259 if self.setexec:
1261 if self.setexec:
1260 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1262 self.xargs(self.setexec, 'propset', 'svn:executable', '*')
1261 self.setexec = []
1263 self.setexec = []
1262
1264
1263 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1265 fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
1264 fp = os.fdopen(fd, 'w')
1266 fp = os.fdopen(fd, 'w')
1265 fp.write(commit.desc)
1267 fp.write(commit.desc)
1266 fp.close()
1268 fp.close()
1267 try:
1269 try:
1268 output = self.run0('commit',
1270 output = self.run0('commit',
1269 username=util.shortuser(commit.author),
1271 username=util.shortuser(commit.author),
1270 file=messagefile,
1272 file=messagefile,
1271 encoding='utf-8')
1273 encoding='utf-8')
1272 try:
1274 try:
1273 rev = self.commit_re.search(output).group(1)
1275 rev = self.commit_re.search(output).group(1)
1274 except AttributeError:
1276 except AttributeError:
1275 if not files:
1277 if not files:
1276 return parents[0]
1278 return parents[0]
1277 self.ui.warn(_('unexpected svn output:\n'))
1279 self.ui.warn(_('unexpected svn output:\n'))
1278 self.ui.warn(output)
1280 self.ui.warn(output)
1279 raise util.Abort(_('unable to cope with svn output'))
1281 raise util.Abort(_('unable to cope with svn output'))
1280 if commit.rev:
1282 if commit.rev:
1281 self.run('propset', 'hg:convert-rev', commit.rev,
1283 self.run('propset', 'hg:convert-rev', commit.rev,
1282 revprop=True, revision=rev)
1284 revprop=True, revision=rev)
1283 if commit.branch and commit.branch != 'default':
1285 if commit.branch and commit.branch != 'default':
1284 self.run('propset', 'hg:convert-branch', commit.branch,
1286 self.run('propset', 'hg:convert-branch', commit.branch,
1285 revprop=True, revision=rev)
1287 revprop=True, revision=rev)
1286 for parent in parents:
1288 for parent in parents:
1287 self.addchild(parent, rev)
1289 self.addchild(parent, rev)
1288 return self.revid(rev)
1290 return self.revid(rev)
1289 finally:
1291 finally:
1290 os.unlink(messagefile)
1292 os.unlink(messagefile)
1291
1293
1292 def puttags(self, tags):
1294 def puttags(self, tags):
1293 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1295 self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
1294 return None, None
1296 return None, None
1295
1297
1296 def hascommitfrommap(self, rev):
1298 def hascommitfrommap(self, rev):
1297 # We trust that revisions referenced in a map are still present
1299 # We trust that revisions referenced in a map are still present
1298 # TODO: implement something better if necessary and feasible
1300 # TODO: implement something better if necessary and feasible
1299 return True
1301 return True
1300
1302
1301 def hascommitforsplicemap(self, rev):
1303 def hascommitforsplicemap(self, rev):
1302 # This is not correct as one can convert to an existing subversion
1304 # This is not correct as one can convert to an existing subversion
1303 # repository and childmap would not list all revisions. Too bad.
1305 # repository and childmap would not list all revisions. Too bad.
1304 if rev in self.childmap:
1306 if rev in self.childmap:
1305 return True
1307 return True
1306 raise util.Abort(_('splice map revision %s not found in subversion '
1308 raise util.Abort(_('splice map revision %s not found in subversion '
1307 'child map (revision lookups are not implemented)')
1309 'child map (revision lookups are not implemented)')
1308 % rev)
1310 % rev)
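The sink-side half of --full is the pair of lines added to putcommit above: when the flag is set, the converted file list covers the whole source manifest, and anything the sink still tracks but the source no longer reports is queued for deletion before the usual add/commit cycle. The following is a minimal, self-contained sketch of that bookkeeping in spirit, not the convert API itself; plan_full_commit and its arguments are illustrative names.

def plan_full_commit(tracked, converted):
    """Sketch of the --full set arithmetic: `tracked` is the sink's current
    manifest, `converted` the (name, revision) pairs the source reports when
    every file is converted again.  Names that are tracked but no longer
    reported become deletions; the reported names are (re)written."""
    names = set(name for name, _rev in converted)
    deletions = sorted(set(tracked) - names)
    return sorted(names), deletions

# With a filemap that newly excludes 'a-only', converting a source that now
# reports only 'f' against a sink tracking both files yields:
print(plan_full_commit({'a-only', 'f'}, [('f', '0:tip')]))
# (['f'], ['a-only'])  -- f is rewritten, a-only is removed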
@@ -1,539 +1,552 b''
1
1
2 $ cat >> $HGRCPATH <<EOF
2 $ cat >> $HGRCPATH <<EOF
3 > [extensions]
3 > [extensions]
4 > convert=
4 > convert=
5 > [convert]
5 > [convert]
6 > hg.saverev=False
6 > hg.saverev=False
7 > EOF
7 > EOF
8 $ hg init orig
8 $ hg init orig
9 $ cd orig
9 $ cd orig
10 $ echo foo > foo
10 $ echo foo > foo
11 $ echo bar > bar
11 $ echo bar > bar
12 $ hg ci -qAm 'add foo and bar'
12 $ hg ci -qAm 'add foo and bar'
13 $ hg rm foo
13 $ hg rm foo
14 $ hg ci -m 'remove foo'
14 $ hg ci -m 'remove foo'
15 $ mkdir foo
15 $ mkdir foo
16 $ echo file > foo/file
16 $ echo file > foo/file
17 $ hg ci -qAm 'add foo/file'
17 $ hg ci -qAm 'add foo/file'
18 $ hg tag some-tag
18 $ hg tag some-tag
19 $ hg tag -l local-tag
19 $ hg tag -l local-tag
20 $ hg log
20 $ hg log
21 changeset: 3:593cbf6fb2b4
21 changeset: 3:593cbf6fb2b4
22 tag: local-tag
22 tag: local-tag
23 tag: tip
23 tag: tip
24 user: test
24 user: test
25 date: Thu Jan 01 00:00:00 1970 +0000
25 date: Thu Jan 01 00:00:00 1970 +0000
26 summary: Added tag some-tag for changeset ad681a868e44
26 summary: Added tag some-tag for changeset ad681a868e44
27
27
28 changeset: 2:ad681a868e44
28 changeset: 2:ad681a868e44
29 tag: some-tag
29 tag: some-tag
30 user: test
30 user: test
31 date: Thu Jan 01 00:00:00 1970 +0000
31 date: Thu Jan 01 00:00:00 1970 +0000
32 summary: add foo/file
32 summary: add foo/file
33
33
34 changeset: 1:cbba8ecc03b7
34 changeset: 1:cbba8ecc03b7
35 user: test
35 user: test
36 date: Thu Jan 01 00:00:00 1970 +0000
36 date: Thu Jan 01 00:00:00 1970 +0000
37 summary: remove foo
37 summary: remove foo
38
38
39 changeset: 0:327daa9251fa
39 changeset: 0:327daa9251fa
40 user: test
40 user: test
41 date: Thu Jan 01 00:00:00 1970 +0000
41 date: Thu Jan 01 00:00:00 1970 +0000
42 summary: add foo and bar
42 summary: add foo and bar
43
43
44 $ cd ..
44 $ cd ..
45 $ hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
45 $ hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
46 initializing destination new repository
46 initializing destination new repository
47 scanning source...
47 scanning source...
48 sorting...
48 sorting...
49 converting...
49 converting...
50 3 add foo and bar
50 3 add foo and bar
51 2 remove foo
51 2 remove foo
52 1 add foo/file
52 1 add foo/file
53 0 Added tag some-tag for changeset ad681a868e44
53 0 Added tag some-tag for changeset ad681a868e44
54 $ cd new
54 $ cd new
55 $ hg out ../orig
55 $ hg out ../orig
56 comparing with ../orig
56 comparing with ../orig
57 searching for changes
57 searching for changes
58 no changes found
58 no changes found
59 [1]
59 [1]
60
60
61 dirstate should be empty:
61 dirstate should be empty:
62
62
63 $ hg debugstate
63 $ hg debugstate
64 $ hg parents -q
64 $ hg parents -q
65 $ hg up -C
65 $ hg up -C
66 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
66 3 files updated, 0 files merged, 0 files removed, 0 files unresolved
67 $ hg copy bar baz
67 $ hg copy bar baz
68
68
69 put something in the dirstate:
69 put something in the dirstate:
70
70
71 $ hg debugstate > debugstate
71 $ hg debugstate > debugstate
72 $ grep baz debugstate
72 $ grep baz debugstate
73 a 0 -1 unset baz
73 a 0 -1 unset baz
74 copy: bar -> baz
74 copy: bar -> baz
75
75
76 add a new revision in the original repo
76 add a new revision in the original repo
77
77
78 $ cd ../orig
78 $ cd ../orig
79 $ echo baz > baz
79 $ echo baz > baz
80 $ hg ci -qAm 'add baz'
80 $ hg ci -qAm 'add baz'
81 $ cd ..
81 $ cd ..
82 $ hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
82 $ hg convert orig new 2>&1 | grep -v 'subversion python bindings could not be loaded'
83 scanning source...
83 scanning source...
84 sorting...
84 sorting...
85 converting...
85 converting...
86 0 add baz
86 0 add baz
87 $ cd new
87 $ cd new
88 $ hg out ../orig
88 $ hg out ../orig
89 comparing with ../orig
89 comparing with ../orig
90 searching for changes
90 searching for changes
91 no changes found
91 no changes found
92 [1]
92 [1]
93
93
94 dirstate should be the same (no output below):
94 dirstate should be the same (no output below):
95
95
96 $ hg debugstate > new-debugstate
96 $ hg debugstate > new-debugstate
97 $ diff debugstate new-debugstate
97 $ diff debugstate new-debugstate
98
98
99 no copies
99 no copies
100
100
101 $ hg up -C
101 $ hg up -C
102 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
102 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
103 $ hg debugrename baz
103 $ hg debugrename baz
104 baz not renamed
104 baz not renamed
105 $ cd ..
105 $ cd ..
106
106
107 test tag rewriting
107 test tag rewriting
108
108
109 $ cat > filemap <<EOF
109 $ cat > filemap <<EOF
110 > exclude foo
110 > exclude foo
111 > EOF
111 > EOF
112 $ hg convert --filemap filemap orig new-filemap 2>&1 | grep -v 'subversion python bindings could not be loaded'
112 $ hg convert --filemap filemap orig new-filemap 2>&1 | grep -v 'subversion python bindings could not be loaded'
113 initializing destination new-filemap repository
113 initializing destination new-filemap repository
114 scanning source...
114 scanning source...
115 sorting...
115 sorting...
116 converting...
116 converting...
117 4 add foo and bar
117 4 add foo and bar
118 3 remove foo
118 3 remove foo
119 2 add foo/file
119 2 add foo/file
120 1 Added tag some-tag for changeset ad681a868e44
120 1 Added tag some-tag for changeset ad681a868e44
121 0 add baz
121 0 add baz
122 $ cd new-filemap
122 $ cd new-filemap
123 $ hg tags
123 $ hg tags
124 tip 2:3c74706b1ff8
124 tip 2:3c74706b1ff8
125 some-tag 0:ba8636729451
125 some-tag 0:ba8636729451
126 $ cd ..
126 $ cd ..
127
127
128
128
129 Test cases for hg-hg roundtrip
129 Test cases for hg-hg roundtrip
130
130
131 Helper
131 Helper
132
132
133 $ glog()
133 $ glog()
134 > {
134 > {
135 > hg log -G --template '{rev} {node|short} "{desc}" files: {files}\n' $*
135 > hg log -G --template '{rev} {node|short} "{desc}" files: {files}\n' $*
136 > }
136 > }
137
137
138 Create a tricky source repo
138 Create a tricky source repo
139
139
140 $ hg init source
140 $ hg init source
141 $ cd source
141 $ cd source
142
142
143 $ echo 0 > 0
143 $ echo 0 > 0
144 $ hg ci -Aqm '0: add 0'
144 $ hg ci -Aqm '0: add 0'
145 $ echo a > a
145 $ echo a > a
146 $ mkdir dir
146 $ mkdir dir
147 $ echo b > dir/b
147 $ echo b > dir/b
148 $ hg ci -qAm '1: add a and dir/b'
148 $ hg ci -qAm '1: add a and dir/b'
149 $ echo c > dir/c
149 $ echo c > dir/c
150 $ hg ci -qAm '2: add dir/c'
150 $ hg ci -qAm '2: add dir/c'
151 $ hg copy a e
151 $ hg copy a e
152 $ echo b >> b
152 $ echo b >> b
153 $ hg ci -qAm '3: copy a to e, change b'
153 $ hg ci -qAm '3: copy a to e, change b'
154 $ hg up -qr -3
154 $ hg up -qr -3
155 $ echo a >> a
155 $ echo a >> a
156 $ hg ci -qAm '4: change a'
156 $ hg ci -qAm '4: change a'
157 $ hg merge
157 $ hg merge
158 merging a and e to e
158 merging a and e to e
159 2 files updated, 1 files merged, 0 files removed, 0 files unresolved
159 2 files updated, 1 files merged, 0 files removed, 0 files unresolved
160 (branch merge, don't forget to commit)
160 (branch merge, don't forget to commit)
161 $ hg copy b dir/d
161 $ hg copy b dir/d
162 $ hg ci -qAm '5: merge 2 and 3, copy b to dir/d'
162 $ hg ci -qAm '5: merge 2 and 3, copy b to dir/d'
163 $ echo a >> a
163 $ echo a >> a
164 $ hg ci -qAm '6: change a'
164 $ hg ci -qAm '6: change a'
165
165
166 $ hg mani
166 $ hg mani
167 0
167 0
168 a
168 a
169 b
169 b
170 dir/b
170 dir/b
171 dir/c
171 dir/c
172 dir/d
172 dir/d
173 e
173 e
174 $ glog
174 $ glog
175 @ 6 0613c8e59a3d "6: change a" files: a
175 @ 6 0613c8e59a3d "6: change a" files: a
176 |
176 |
177 o 5 717e9b37cdb7 "5: merge 2 and 3, copy b to dir/d" files: dir/d e
177 o 5 717e9b37cdb7 "5: merge 2 and 3, copy b to dir/d" files: dir/d e
178 |\
178 |\
179 | o 4 86a55cb968d5 "4: change a" files: a
179 | o 4 86a55cb968d5 "4: change a" files: a
180 | |
180 | |
181 o | 3 0e6e235919dd "3: copy a to e, change b" files: b e
181 o | 3 0e6e235919dd "3: copy a to e, change b" files: b e
182 | |
182 | |
183 o | 2 0394b0d5e4f7 "2: add dir/c" files: dir/c
183 o | 2 0394b0d5e4f7 "2: add dir/c" files: dir/c
184 |/
184 |/
185 o 1 333546584845 "1: add a and dir/b" files: a dir/b
185 o 1 333546584845 "1: add a and dir/b" files: a dir/b
186 |
186 |
187 o 0 d1a24e2ebd23 "0: add 0" files: 0
187 o 0 d1a24e2ebd23 "0: add 0" files: 0
188
188
189 $ cd ..
189 $ cd ..
190
190
191 Convert excluding rev 0 and dir/ (and thus rev2):
191 Convert excluding rev 0 and dir/ (and thus rev2):
192
192
193 $ cat << EOF > filemap
193 $ cat << EOF > filemap
194 > exclude dir
194 > exclude dir
195 > EOF
195 > EOF
196
196
197 $ hg convert --filemap filemap source dest --config convert.hg.revs=1::
197 $ hg convert --filemap filemap source dest --config convert.hg.revs=1::
198 initializing destination dest repository
198 initializing destination dest repository
199 scanning source...
199 scanning source...
200 sorting...
200 sorting...
201 converting...
201 converting...
202 5 1: add a and dir/b
202 5 1: add a and dir/b
203 4 2: add dir/c
203 4 2: add dir/c
204 3 3: copy a to e, change b
204 3 3: copy a to e, change b
205 2 4: change a
205 2 4: change a
206 1 5: merge 2 and 3, copy b to dir/d
206 1 5: merge 2 and 3, copy b to dir/d
207 0 6: change a
207 0 6: change a
208
208
209 Verify that conversion skipped rev 2:
209 Verify that conversion skipped rev 2:
210
210
211 $ glog -R dest
211 $ glog -R dest
212 o 4 78814e84a217 "6: change a" files: a
212 o 4 78814e84a217 "6: change a" files: a
213 |
213 |
214 o 3 f7cff662c5e5 "5: merge 2 and 3, copy b to dir/d" files: e
214 o 3 f7cff662c5e5 "5: merge 2 and 3, copy b to dir/d" files: e
215 |\
215 |\
216 | o 2 ab40a95b0072 "4: change a" files: a
216 | o 2 ab40a95b0072 "4: change a" files: a
217 | |
217 | |
218 o | 1 bd51f17597bf "3: copy a to e, change b" files: b e
218 o | 1 bd51f17597bf "3: copy a to e, change b" files: b e
219 |/
219 |/
220 o 0 a4a1dae0fe35 "1: add a and dir/b" files: 0 a
220 o 0 a4a1dae0fe35 "1: add a and dir/b" files: 0 a
221
221
222
222
223 Verify mapping correct in both directions:
223 Verify mapping correct in both directions:
224
224
225 $ cat source/.hg/shamap
225 $ cat source/.hg/shamap
226 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5 333546584845f70c4cfecb992341aaef0e708166
226 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5 333546584845f70c4cfecb992341aaef0e708166
227 bd51f17597bf32268e68a560b206898c3960cda2 0e6e235919dd8e9285ba8eb5adf703af9ad99378
227 bd51f17597bf32268e68a560b206898c3960cda2 0e6e235919dd8e9285ba8eb5adf703af9ad99378
228 ab40a95b00725307e79c2fd271000aa8af9759f4 86a55cb968d51770cba2a1630d6cc637b574580a
228 ab40a95b00725307e79c2fd271000aa8af9759f4 86a55cb968d51770cba2a1630d6cc637b574580a
229 f7cff662c5e581e6f3f1a85ffdd2bcb35825f6ba 717e9b37cdb7eb9917ca8e30aa3f986e6d5b177d
229 f7cff662c5e581e6f3f1a85ffdd2bcb35825f6ba 717e9b37cdb7eb9917ca8e30aa3f986e6d5b177d
230 78814e84a217894517c2de392b903ed05e6871a4 0613c8e59a3ddb9789072ef52f1ed13496489bb4
230 78814e84a217894517c2de392b903ed05e6871a4 0613c8e59a3ddb9789072ef52f1ed13496489bb4
231 $ cat dest/.hg/shamap
231 $ cat dest/.hg/shamap
232 333546584845f70c4cfecb992341aaef0e708166 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5
232 333546584845f70c4cfecb992341aaef0e708166 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5
233 0394b0d5e4f761ced559fd0bbdc6afc16cb3f7d1 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5
233 0394b0d5e4f761ced559fd0bbdc6afc16cb3f7d1 a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5
234 0e6e235919dd8e9285ba8eb5adf703af9ad99378 bd51f17597bf32268e68a560b206898c3960cda2
234 0e6e235919dd8e9285ba8eb5adf703af9ad99378 bd51f17597bf32268e68a560b206898c3960cda2
235 86a55cb968d51770cba2a1630d6cc637b574580a ab40a95b00725307e79c2fd271000aa8af9759f4
235 86a55cb968d51770cba2a1630d6cc637b574580a ab40a95b00725307e79c2fd271000aa8af9759f4
236 717e9b37cdb7eb9917ca8e30aa3f986e6d5b177d f7cff662c5e581e6f3f1a85ffdd2bcb35825f6ba
236 717e9b37cdb7eb9917ca8e30aa3f986e6d5b177d f7cff662c5e581e6f3f1a85ffdd2bcb35825f6ba
237 0613c8e59a3ddb9789072ef52f1ed13496489bb4 78814e84a217894517c2de392b903ed05e6871a4
237 0613c8e59a3ddb9789072ef52f1ed13496489bb4 78814e84a217894517c2de392b903ed05e6871a4
238
238
239 Verify meta data converted correctly:
239 Verify meta data converted correctly:
240
240
241 $ hg -R dest log -r 1 --debug -p --git
241 $ hg -R dest log -r 1 --debug -p --git
242 changeset: 1:bd51f17597bf32268e68a560b206898c3960cda2
242 changeset: 1:bd51f17597bf32268e68a560b206898c3960cda2
243 phase: draft
243 phase: draft
244 parent: 0:a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5
244 parent: 0:a4a1dae0fe3514cefd9b8541b7abbc8f44f946d5
245 parent: -1:0000000000000000000000000000000000000000
245 parent: -1:0000000000000000000000000000000000000000
246 manifest: 1:040c72ed9b101773c24ac314776bfc846943781f
246 manifest: 1:040c72ed9b101773c24ac314776bfc846943781f
247 user: test
247 user: test
248 date: Thu Jan 01 00:00:00 1970 +0000
248 date: Thu Jan 01 00:00:00 1970 +0000
249 files+: b e
249 files+: b e
250 extra: branch=default
250 extra: branch=default
251 description:
251 description:
252 3: copy a to e, change b
252 3: copy a to e, change b
253
253
254
254
255 diff --git a/b b/b
255 diff --git a/b b/b
256 new file mode 100644
256 new file mode 100644
257 --- /dev/null
257 --- /dev/null
258 +++ b/b
258 +++ b/b
259 @@ -0,0 +1,1 @@
259 @@ -0,0 +1,1 @@
260 +b
260 +b
261 diff --git a/a b/e
261 diff --git a/a b/e
262 copy from a
262 copy from a
263 copy to e
263 copy to e
264
264
265 Verify files included and excluded correctly:
265 Verify files included and excluded correctly:
266
266
267 $ hg -R dest manifest -r tip
267 $ hg -R dest manifest -r tip
268 0
268 0
269 a
269 a
270 b
270 b
271 e
271 e
272
272
273
273
274 Make changes in dest and convert back:
274 Make changes in dest and convert back:
275
275
276 $ hg -R dest up -q
276 $ hg -R dest up -q
277 $ echo dest > dest/dest
277 $ echo dest > dest/dest
278 $ hg -R dest ci -Aqm 'change in dest'
278 $ hg -R dest ci -Aqm 'change in dest'
279 $ hg -R dest tip
279 $ hg -R dest tip
280 changeset: 5:a2e0e3cc6d1d
280 changeset: 5:a2e0e3cc6d1d
281 tag: tip
281 tag: tip
282 user: test
282 user: test
283 date: Thu Jan 01 00:00:00 1970 +0000
283 date: Thu Jan 01 00:00:00 1970 +0000
284 summary: change in dest
284 summary: change in dest
285
285
286
286
287 (converting merges back after using a filemap will probably cause chaos so we
287 (converting merges back after using a filemap will probably cause chaos so we
288 exclude merges.)
288 exclude merges.)
289
289
290 $ hg convert dest source --config convert.hg.revs='!merge()'
290 $ hg convert dest source --config convert.hg.revs='!merge()'
291 scanning source...
291 scanning source...
292 sorting...
292 sorting...
293 converting...
293 converting...
294 0 change in dest
294 0 change in dest
295
295
296 Verify the conversion back:
296 Verify the conversion back:
297
297
298 $ hg -R source log --debug -r tip
298 $ hg -R source log --debug -r tip
299 changeset: 7:e6d364a69ff1248b2099e603b0c145504cade6f0
299 changeset: 7:e6d364a69ff1248b2099e603b0c145504cade6f0
300 tag: tip
300 tag: tip
301 phase: draft
301 phase: draft
302 parent: 6:0613c8e59a3ddb9789072ef52f1ed13496489bb4
302 parent: 6:0613c8e59a3ddb9789072ef52f1ed13496489bb4
303 parent: -1:0000000000000000000000000000000000000000
303 parent: -1:0000000000000000000000000000000000000000
304 manifest: 7:aa3e9542f3b76d4f1f1b2e9c7ce9dbb48b6a95ec
304 manifest: 7:aa3e9542f3b76d4f1f1b2e9c7ce9dbb48b6a95ec
305 user: test
305 user: test
306 date: Thu Jan 01 00:00:00 1970 +0000
306 date: Thu Jan 01 00:00:00 1970 +0000
307 files+: dest
307 files+: dest
308 extra: branch=default
308 extra: branch=default
309 description:
309 description:
310 change in dest
310 change in dest
311
311
312
312
313 Files that had been excluded are still present:
313 Files that had been excluded are still present:
314
314
315 $ hg -R source manifest -r tip
315 $ hg -R source manifest -r tip
316 0
316 0
317 a
317 a
318 b
318 b
319 dest
319 dest
320 dir/b
320 dir/b
321 dir/c
321 dir/c
322 dir/d
322 dir/d
323 e
323 e
324
324
325 More source changes
325 More source changes
326
326
327 $ cd source
327 $ cd source
328 $ echo 1 >> a
328 $ echo 1 >> a
329 $ hg ci -m '8: source first branch'
329 $ hg ci -m '8: source first branch'
330 created new head
330 created new head
331 $ hg up -qr -2
331 $ hg up -qr -2
332 $ echo 2 >> a
332 $ echo 2 >> a
333 $ hg ci -m '9: source second branch'
333 $ hg ci -m '9: source second branch'
334 $ hg merge -q --tool internal:local
334 $ hg merge -q --tool internal:local
335 $ hg ci -m '10: source merge'
335 $ hg ci -m '10: source merge'
336 $ echo >> a
336 $ echo >> a
337 $ hg ci -m '11: source change'
337 $ hg ci -m '11: source change'
338
338
339 $ hg mani
339 $ hg mani
340 0
340 0
341 a
341 a
342 b
342 b
343 dest
343 dest
344 dir/b
344 dir/b
345 dir/c
345 dir/c
346 dir/d
346 dir/d
347 e
347 e
348
348
349 $ glog -r 6:
349 $ glog -r 6:
350 @ 11 0c8927d1f7f4 "11: source change" files: a
350 @ 11 0c8927d1f7f4 "11: source change" files: a
351 |
351 |
352 o 10 9ccb7ee8d261 "10: source merge" files: a
352 o 10 9ccb7ee8d261 "10: source merge" files: a
353 |\
353 |\
354 | o 9 f131b1518dba "9: source second branch" files: a
354 | o 9 f131b1518dba "9: source second branch" files: a
355 | |
355 | |
356 o | 8 669cf0e74b50 "8: source first branch" files: a
356 o | 8 669cf0e74b50 "8: source first branch" files: a
357 | |
357 | |
358 | o 7 e6d364a69ff1 "change in dest" files: dest
358 | o 7 e6d364a69ff1 "change in dest" files: dest
359 |/
359 |/
360 o 6 0613c8e59a3d "6: change a" files: a
360 o 6 0613c8e59a3d "6: change a" files: a
361 |
361 |
362 $ cd ..
362 $ cd ..
363
363
364 $ hg convert --filemap filemap source dest --config convert.hg.revs=3:
364 $ hg convert --filemap filemap source dest --config convert.hg.revs=3:
365 scanning source...
365 scanning source...
366 sorting...
366 sorting...
367 converting...
367 converting...
368 3 8: source first branch
368 3 8: source first branch
369 2 9: source second branch
369 2 9: source second branch
370 1 10: source merge
370 1 10: source merge
371 0 11: source change
371 0 11: source change
372
372
373 $ glog -R dest
373 $ glog -R dest
374 o 9 8432d597b263 "11: source change" files: a
374 o 9 8432d597b263 "11: source change" files: a
375 |
375 |
376 o 8 632ffacdcd6f "10: source merge" files: a
376 o 8 632ffacdcd6f "10: source merge" files: a
377 |\
377 |\
378 | o 7 049cfee90ee6 "9: source second branch" files: a
378 | o 7 049cfee90ee6 "9: source second branch" files: a
379 | |
379 | |
380 o | 6 9b6845e036e5 "8: source first branch" files: a
380 o | 6 9b6845e036e5 "8: source first branch" files: a
381 | |
381 | |
382 | @ 5 a2e0e3cc6d1d "change in dest" files: dest
382 | @ 5 a2e0e3cc6d1d "change in dest" files: dest
383 |/
383 |/
384 o 4 78814e84a217 "6: change a" files: a
384 o 4 78814e84a217 "6: change a" files: a
385 |
385 |
386 o 3 f7cff662c5e5 "5: merge 2 and 3, copy b to dir/d" files: e
386 o 3 f7cff662c5e5 "5: merge 2 and 3, copy b to dir/d" files: e
387 |\
387 |\
388 | o 2 ab40a95b0072 "4: change a" files: a
388 | o 2 ab40a95b0072 "4: change a" files: a
389 | |
389 | |
390 o | 1 bd51f17597bf "3: copy a to e, change b" files: b e
390 o | 1 bd51f17597bf "3: copy a to e, change b" files: b e
391 |/
391 |/
392 o 0 a4a1dae0fe35 "1: add a and dir/b" files: 0 a
392 o 0 a4a1dae0fe35 "1: add a and dir/b" files: 0 a
393
393
394 $ cd ..
394 $ cd ..
395
395
396 Two way tests
396 Two way tests
397
397
398 $ hg init 0
398 $ hg init 0
399 $ echo f > 0/f
399 $ echo f > 0/f
400 $ echo a > 0/a-only
400 $ echo a > 0/a-only
401 $ echo b > 0/b-only
401 $ echo b > 0/b-only
402 $ hg -R 0 ci -Aqm0
402 $ hg -R 0 ci -Aqm0
403
403
404 $ cat << EOF > filemap-a
404 $ cat << EOF > filemap-a
405 > exclude b-only
405 > exclude b-only
406 > EOF
406 > EOF
407 $ cat << EOF > filemap-b
407 $ cat << EOF > filemap-b
408 > exclude a-only
408 > exclude a-only
409 > EOF
409 > EOF
410 $ hg convert --filemap filemap-a 0 a
410 $ hg convert --filemap filemap-a 0 a
411 initializing destination a repository
411 initializing destination a repository
412 scanning source...
412 scanning source...
413 sorting...
413 sorting...
414 converting...
414 converting...
415 0 0
415 0 0
416 $ hg -R a up -q
416 $ hg -R a up -q
417 $ echo a > a/f
417 $ echo a > a/f
418 $ hg -R a ci -ma
418 $ hg -R a ci -ma
419
419
420 $ hg convert --filemap filemap-b 0 b
420 $ hg convert --filemap filemap-b 0 b
421 initializing destination b repository
421 initializing destination b repository
422 scanning source...
422 scanning source...
423 sorting...
423 sorting...
424 converting...
424 converting...
425 0 0
425 0 0
426 $ hg -R b up -q
426 $ hg -R b up -q
427 $ echo b > b/f
427 $ echo b > b/f
428 $ hg -R b ci -mb
428 $ hg -R b ci -mb
429
429
430 $ tail */.hg/shamap
430 $ tail */.hg/shamap
431 ==> 0/.hg/shamap <==
431 ==> 0/.hg/shamap <==
432 86f3f774ffb682bffb5dc3c1d3b3da637cb9a0d6 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a
432 86f3f774ffb682bffb5dc3c1d3b3da637cb9a0d6 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a
433 dd9f218eb91fb857f2a62fe023e1d64a4e7812fe 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a
433 dd9f218eb91fb857f2a62fe023e1d64a4e7812fe 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a
434
434
435 ==> a/.hg/shamap <==
435 ==> a/.hg/shamap <==
436 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a 86f3f774ffb682bffb5dc3c1d3b3da637cb9a0d6
436 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a 86f3f774ffb682bffb5dc3c1d3b3da637cb9a0d6
437
437
438 ==> b/.hg/shamap <==
438 ==> b/.hg/shamap <==
439 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a dd9f218eb91fb857f2a62fe023e1d64a4e7812fe
439 8a028c7c77f6c7bd6d63bc3f02ca9f779eabf16a dd9f218eb91fb857f2a62fe023e1d64a4e7812fe
440
440
441 $ hg convert a 0
441 $ hg convert a 0
442 scanning source...
442 scanning source...
443 sorting...
443 sorting...
444 converting...
444 converting...
445 0 a
445 0 a
446
446
447 $ hg convert b 0
447 $ hg convert b 0
448 scanning source...
448 scanning source...
449 sorting...
449 sorting...
450 converting...
450 converting...
451 0 b
451 0 b
452
452
453 $ hg -R 0 log -G
453 $ hg -R 0 log -G
454 o changeset: 2:637fbbbe96b6
454 o changeset: 2:637fbbbe96b6
455 | tag: tip
455 | tag: tip
456 | parent: 0:8a028c7c77f6
456 | parent: 0:8a028c7c77f6
457 | user: test
457 | user: test
458 | date: Thu Jan 01 00:00:00 1970 +0000
458 | date: Thu Jan 01 00:00:00 1970 +0000
459 | summary: b
459 | summary: b
460 |
460 |
461 | o changeset: 1:ec7b9c96e692
461 | o changeset: 1:ec7b9c96e692
462 |/ user: test
462 |/ user: test
463 | date: Thu Jan 01 00:00:00 1970 +0000
463 | date: Thu Jan 01 00:00:00 1970 +0000
464 | summary: a
464 | summary: a
465 |
465 |
466 @ changeset: 0:8a028c7c77f6
466 @ changeset: 0:8a028c7c77f6
467 user: test
467 user: test
468 date: Thu Jan 01 00:00:00 1970 +0000
468 date: Thu Jan 01 00:00:00 1970 +0000
469 summary: 0
469 summary: 0
470
470
471 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
471 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
472 scanning source...
472 scanning source...
473 sorting...
473 sorting...
474 converting...
474 converting...
475
475
476 $ hg -R 0 up -r1
476 $ hg -R 0 up -r1
477 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
477 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
478 $ echo f >> 0/f
478 $ echo f >> 0/f
479 $ hg -R 0 ci -mx
479 $ hg -R 0 ci -mx
480
480
481 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
481 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
482 scanning source...
482 scanning source...
483 sorting...
483 sorting...
484 converting...
484 converting...
485 0 x
485 0 x
486
486
487 $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n'
487 $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n'
488 o 2 x (f)
488 o 2 x (f)
489 |
489 |
490 @ 1 a (f)
490 @ 1 a (f)
491 |
491 |
492 o 0 0 (a-only f)
492 o 0 0 (a-only f)
493
493
494 $ hg -R a mani -r tip
494 $ hg -R a mani -r tip
495 a-only
495 a-only
496 f
496 f
497
497
498 An additional round, demonstrating that unchanged files don't get converted
498 An additional round, demonstrating that unchanged files don't get converted
499
499
500 $ echo f >> 0/f
500 $ echo f >> 0/f
501 $ echo f >> 0/a-only
501 $ echo f >> 0/a-only
502 $ hg -R 0 ci -m "extra f+a-only change"
502 $ hg -R 0 ci -m "extra f+a-only change"
503
503
504 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
504 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
505 scanning source...
505 scanning source...
506 sorting...
506 sorting...
507 converting...
507 converting...
508 0 extra f+a-only change
508 0 extra f+a-only change
509
509
510 $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n'
510 $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n'
511 o 3 extra f+a-only change (f)
511 o 3 extra f+a-only change (f)
512 |
512 |
513 o 2 x (f)
513 o 2 x (f)
514 |
514 |
515 @ 1 a (f)
515 @ 1 a (f)
516 |
516 |
517 o 0 0 (a-only f)
517 o 0 0 (a-only f)
518
518
519
519
520 Conversion after rollback
520 Conversion after rollback
521
521
522 $ hg -R a rollback -f
522 $ hg -R a rollback -f
523 repository tip rolled back to revision 2 (undo commit)
523 repository tip rolled back to revision 2 (undo commit)
524
524
525 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
525 $ hg convert --filemap filemap-b 0 a --config convert.hg.revs=1::
526 scanning source...
526 scanning source...
527 sorting...
527 sorting...
528 converting...
528 converting...
529 0 extra f+a-only change
529 0 extra f+a-only change
530
530
531 $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n'
531 $ hg -R a log -G -T '{rev} {desc|firstline} ({files})\n'
532 o 3 extra f+a-only change (f)
532 o 3 extra f+a-only change (f)
533 |
533 |
534 o 2 x (f)
534 o 2 x (f)
535 |
535 |
536 @ 1 a (f)
536 @ 1 a (f)
537 |
537 |
538 o 0 0 (a-only f)
538 o 0 0 (a-only f)
539
539
540 Convert with --full adds and removes files that didn't change
541
542 $ echo f >> 0/f
543 $ hg -R 0 ci -m "f"
544 $ hg convert --filemap filemap-b --full 0 a --config convert.hg.revs=1::
545 scanning source...
546 sorting...
547 converting...
548 0 f
549 $ hg -R a status --change tip
550 M f
551 A b-only
552 R a-only
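The status check above is the observable contract of --full on the Mercurial sink: the new tip is rebuilt from the filemap-filtered source manifest, so a file the old filemap excluded (b-only) shows up as added, a file the new filemap excludes (a-only) as removed, and a changed file (f) as modified. A small sketch of that comparison follows; the function and argument names are hypothetical.

def full_tip_status(dest_manifest, source_manifest, excludes):
    """Sketch: the added/removed sets a --full conversion leaves in the
    destination, given the previous destination manifest, the source
    manifest, and the paths the current filemap excludes."""
    wanted = set(f for f in source_manifest if f not in excludes)
    added = sorted(wanted - set(dest_manifest))
    removed = sorted(set(dest_manifest) - wanted)
    return added, removed

# Mirrors the test above: repo 'a' tracked {a-only, f}, repo '0' contains
# {a-only, b-only, f}, and filemap-b excludes a-only.
print(full_tip_status({'a-only', 'f'}, {'a-only', 'b-only', 'f'}, {'a-only'}))
# (['b-only'], ['a-only'])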
@@ -1,432 +1,457 b''
1 #require svn13
1 #require svn13
2
2
3 $ svnupanddisplay()
3 $ svnupanddisplay()
4 > {
4 > {
5 > (
5 > (
6 > cd $1;
6 > cd $1;
7 > svn up -q;
7 > svn up -q;
8 > svn st -v | sed 's/ */ /g' | sort
8 > svn st -v | sed 's/ */ /g' | sort
9 > limit=''
9 > limit=''
10 > if [ $2 -gt 0 ]; then
10 > if [ $2 -gt 0 ]; then
11 > limit="--limit=$2"
11 > limit="--limit=$2"
12 > fi
12 > fi
13 > svn log --xml -v $limit | python "$TESTDIR/svnxml.py"
13 > svn log --xml -v $limit | python "$TESTDIR/svnxml.py"
14 > )
14 > )
15 > }
15 > }
16
16
17 $ cat >> $HGRCPATH <<EOF
17 $ cat >> $HGRCPATH <<EOF
18 > [extensions]
18 > [extensions]
19 > convert =
19 > convert =
20 > EOF
20 > EOF
21
21
22 $ hg init a
22 $ hg init a
23
23
24 Add
24 Add
25
25
26 $ echo a > a/a
26 $ echo a > a/a
27 $ mkdir -p a/d1/d2
27 $ mkdir -p a/d1/d2
28 $ echo b > a/d1/d2/b
28 $ echo b > a/d1/d2/b
29 $ hg --cwd a ci -d '0 0' -A -m 'add a file'
29 $ hg --cwd a ci -d '0 0' -A -m 'add a file'
30 adding a
30 adding a
31 adding d1/d2/b
31 adding d1/d2/b
32
32
33 Modify
33 Modify
34
34
35 $ "$TESTDIR/svn-safe-append.py" a a/a
35 $ "$TESTDIR/svn-safe-append.py" a a/a
36 $ hg --cwd a ci -d '1 0' -m 'modify a file'
36 $ hg --cwd a ci -d '1 0' -m 'modify a file'
37 $ hg --cwd a tip -q
37 $ hg --cwd a tip -q
38 1:e0e2b8a9156b
38 1:e0e2b8a9156b
39
39
40 $ hg convert -d svn a
40 $ hg convert -d svn a
41 assuming destination a-hg
41 assuming destination a-hg
42 initializing svn repository 'a-hg'
42 initializing svn repository 'a-hg'
43 initializing svn working copy 'a-hg-wc'
43 initializing svn working copy 'a-hg-wc'
44 scanning source...
44 scanning source...
45 sorting...
45 sorting...
46 converting...
46 converting...
47 1 add a file
47 1 add a file
48 0 modify a file
48 0 modify a file
49 $ svnupanddisplay a-hg-wc 2
49 $ svnupanddisplay a-hg-wc 2
50 2 1 test d1
50 2 1 test d1
51 2 1 test d1/d2 (glob)
51 2 1 test d1/d2 (glob)
52 2 1 test d1/d2/b (glob)
52 2 1 test d1/d2/b (glob)
53 2 2 test .
53 2 2 test .
54 2 2 test a
54 2 2 test a
55 revision: 2
55 revision: 2
56 author: test
56 author: test
57 msg: modify a file
57 msg: modify a file
58 M /a
58 M /a
59 revision: 1
59 revision: 1
60 author: test
60 author: test
61 msg: add a file
61 msg: add a file
62 A /a
62 A /a
63 A /d1
63 A /d1
64 A /d1/d2
64 A /d1/d2
65 A /d1/d2/b
65 A /d1/d2/b
66 $ ls a a-hg-wc
66 $ ls a a-hg-wc
67 a:
67 a:
68 a
68 a
69 d1
69 d1
70
70
71 a-hg-wc:
71 a-hg-wc:
72 a
72 a
73 d1
73 d1
74 $ cmp a/a a-hg-wc/a
74 $ cmp a/a a-hg-wc/a
75
75
76 Rename
76 Rename
77
77
78 $ hg --cwd a mv a b
78 $ hg --cwd a mv a b
79 $ hg --cwd a ci -d '2 0' -m 'rename a file'
79 $ hg --cwd a ci -d '2 0' -m 'rename a file'
80 $ hg --cwd a tip -q
80 $ hg --cwd a tip -q
81 2:eb5169441d43
81 2:eb5169441d43
82
82
83 $ hg convert -d svn a
83 $ hg convert -d svn a
84 assuming destination a-hg
84 assuming destination a-hg
85 initializing svn working copy 'a-hg-wc'
85 initializing svn working copy 'a-hg-wc'
86 scanning source...
86 scanning source...
87 sorting...
87 sorting...
88 converting...
88 converting...
89 0 rename a file
89 0 rename a file
90 $ svnupanddisplay a-hg-wc 1
90 $ svnupanddisplay a-hg-wc 1
91 3 1 test d1
91 3 1 test d1
92 3 1 test d1/d2 (glob)
92 3 1 test d1/d2 (glob)
93 3 1 test d1/d2/b (glob)
93 3 1 test d1/d2/b (glob)
94 3 3 test .
94 3 3 test .
95 3 3 test b
95 3 3 test b
96 revision: 3
96 revision: 3
97 author: test
97 author: test
98 msg: rename a file
98 msg: rename a file
99 D /a
99 D /a
100 A /b (from /a@2)
100 A /b (from /a@2)
101 $ ls a a-hg-wc
101 $ ls a a-hg-wc
102 a:
102 a:
103 b
103 b
104 d1
104 d1
105
105
106 a-hg-wc:
106 a-hg-wc:
107 b
107 b
108 d1
108 d1
109
109
110 Copy
110 Copy
111
111
112 $ hg --cwd a cp b c
112 $ hg --cwd a cp b c
113
113
114 $ hg --cwd a ci -d '3 0' -m 'copy a file'
114 $ hg --cwd a ci -d '3 0' -m 'copy a file'
115 $ hg --cwd a tip -q
115 $ hg --cwd a tip -q
116 3:60effef6ab48
116 3:60effef6ab48
117
117
118 $ hg convert -d svn a
118 $ hg convert -d svn a
119 assuming destination a-hg
119 assuming destination a-hg
120 initializing svn working copy 'a-hg-wc'
120 initializing svn working copy 'a-hg-wc'
121 scanning source...
121 scanning source...
122 sorting...
122 sorting...
123 converting...
123 converting...
124 0 copy a file
124 0 copy a file
125 $ svnupanddisplay a-hg-wc 1
125 $ svnupanddisplay a-hg-wc 1
126 4 1 test d1
126 4 1 test d1
127 4 1 test d1/d2 (glob)
127 4 1 test d1/d2 (glob)
128 4 1 test d1/d2/b (glob)
128 4 1 test d1/d2/b (glob)
129 4 3 test b
129 4 3 test b
130 4 4 test .
130 4 4 test .
131 4 4 test c
131 4 4 test c
132 revision: 4
132 revision: 4
133 author: test
133 author: test
134 msg: copy a file
134 msg: copy a file
135 A /c (from /b@3)
135 A /c (from /b@3)
136 $ ls a a-hg-wc
136 $ ls a a-hg-wc
137 a:
137 a:
138 b
138 b
139 c
139 c
140 d1
140 d1
141
141
142 a-hg-wc:
142 a-hg-wc:
143 b
143 b
144 c
144 c
145 d1
145 d1
146
146
147 $ hg --cwd a rm b
147 $ hg --cwd a rm b
148
148
149 Remove
149 Remove
150
150
151 $ hg --cwd a ci -d '4 0' -m 'remove a file'
151 $ hg --cwd a ci -d '4 0' -m 'remove a file'
152 $ hg --cwd a tip -q
152 $ hg --cwd a tip -q
153 4:87bbe3013fb6
153 4:87bbe3013fb6
154
154
155 $ hg convert -d svn a
155 $ hg convert -d svn a
156 assuming destination a-hg
156 assuming destination a-hg
157 initializing svn working copy 'a-hg-wc'
157 initializing svn working copy 'a-hg-wc'
158 scanning source...
158 scanning source...
159 sorting...
159 sorting...
160 converting...
160 converting...
161 0 remove a file
161 0 remove a file
162 $ svnupanddisplay a-hg-wc 1
162 $ svnupanddisplay a-hg-wc 1
163 5 1 test d1
163 5 1 test d1
164 5 1 test d1/d2 (glob)
164 5 1 test d1/d2 (glob)
165 5 1 test d1/d2/b (glob)
165 5 1 test d1/d2/b (glob)
166 5 4 test c
166 5 4 test c
167 5 5 test .
167 5 5 test .
168 revision: 5
168 revision: 5
169 author: test
169 author: test
170 msg: remove a file
170 msg: remove a file
171 D /b
171 D /b
172 $ ls a a-hg-wc
172 $ ls a a-hg-wc
173 a:
173 a:
174 c
174 c
175 d1
175 d1
176
176
177 a-hg-wc:
177 a-hg-wc:
178 c
178 c
179 d1
179 d1
180
180
181 Executable
181 Executable
182
182
183 #if execbit
183 #if execbit
184 $ chmod +x a/c
184 $ chmod +x a/c
185 #else
185 #else
186 $ echo fake >> a/c
186 $ echo fake >> a/c
187 #endif
187 #endif
188 $ hg --cwd a ci -d '5 0' -m 'make a file executable'
188 $ hg --cwd a ci -d '5 0' -m 'make a file executable'
189 #if execbit
189 #if execbit
190 $ hg --cwd a tip -q
190 $ hg --cwd a tip -q
191 5:ff42e473c340
191 5:ff42e473c340
192 #else
192 #else
193 $ hg --cwd a tip -q
193 $ hg --cwd a tip -q
194 5:817a700c8cf1
194 5:817a700c8cf1
195 #endif
195 #endif
196
196
197 $ hg convert -d svn a
197 $ hg convert -d svn a
198 assuming destination a-hg
198 assuming destination a-hg
199 initializing svn working copy 'a-hg-wc'
199 initializing svn working copy 'a-hg-wc'
200 scanning source...
200 scanning source...
201 sorting...
201 sorting...
202 converting...
202 converting...
203 0 make a file executable
203 0 make a file executable
204 $ svnupanddisplay a-hg-wc 1
204 $ svnupanddisplay a-hg-wc 1
205 6 1 test d1
205 6 1 test d1
206 6 1 test d1/d2 (glob)
206 6 1 test d1/d2 (glob)
207 6 1 test d1/d2/b (glob)
207 6 1 test d1/d2/b (glob)
208 6 6 test .
208 6 6 test .
209 6 6 test c
209 6 6 test c
210 revision: 6
210 revision: 6
211 author: test
211 author: test
212 msg: make a file executable
212 msg: make a file executable
213 M /c
213 M /c
214 #if execbit
214 #if execbit
215 $ test -x a-hg-wc/c
215 $ test -x a-hg-wc/c
216 #endif
216 #endif
217
217
218 #if symlink
218 #if symlink
219
219
220 Symlinks
220 Symlinks
221
221
222 $ ln -s a/missing a/link
222 $ ln -s a/missing a/link
223 $ hg --cwd a commit -Am 'add symlink'
223 $ hg --cwd a commit -Am 'add symlink'
224 adding link
224 adding link
225 $ hg --cwd a mv link newlink
225 $ hg --cwd a mv link newlink
226 $ hg --cwd a commit -m 'move symlink'
226 $ hg --cwd a commit -m 'move symlink'
227 $ hg convert -d svn a
227 $ hg convert -d svn a
228 assuming destination a-hg
228 assuming destination a-hg
229 initializing svn working copy 'a-hg-wc'
229 initializing svn working copy 'a-hg-wc'
230 scanning source...
230 scanning source...
231 sorting...
231 sorting...
232 converting...
232 converting...
233 1 add symlink
233 1 add symlink
234 0 move symlink
234 0 move symlink
235 $ svnupanddisplay a-hg-wc 1
235 $ svnupanddisplay a-hg-wc 1
236 8 1 test d1
236 8 1 test d1
237 8 1 test d1/d2
237 8 1 test d1/d2
238 8 1 test d1/d2/b
238 8 1 test d1/d2/b
239 8 6 test c
239 8 6 test c
240 8 8 test .
240 8 8 test .
241 8 8 test newlink
241 8 8 test newlink
242 revision: 8
242 revision: 8
243 author: test
243 author: test
244 msg: move symlink
244 msg: move symlink
245 D /link
245 D /link
246 A /newlink (from /link@7)
246 A /newlink (from /link@7)
247
247
248 #endif
248 #endif
249
249
250 Convert with --full adds and removes files that didn't change
251
252 $ touch a/f
253 $ hg -R a ci -Aqmf
254 $ echo "rename c d" > filemap
255 $ hg convert -d svn a --filemap filemap --full
256 assuming destination a-hg
257 initializing svn working copy 'a-hg-wc'
258 scanning source...
259 sorting...
260 converting...
261 0 f
262 $ svnupanddisplay a-hg-wc 1
263 9 9 test .
264 9 9 test d
265 9 9 test f
266 revision: 9
267 author: test
268 msg: f
269 D /c
270 A /d
271 D /d1
272 A /f
273 D /newlink
274
250 $ rm -rf a a-hg a-hg-wc
275 $ rm -rf a a-hg a-hg-wc
251
276
252
277
253 Executable in new directory
278 Executable in new directory
254
279
255 $ hg init a
280 $ hg init a
256
281
257 $ mkdir a/d1
282 $ mkdir a/d1
258 $ echo a > a/d1/a
283 $ echo a > a/d1/a
259 #if execbit
284 #if execbit
260 $ chmod +x a/d1/a
285 $ chmod +x a/d1/a
261 #else
286 #else
262 $ echo fake >> a/d1/a
287 $ echo fake >> a/d1/a
263 #endif
288 #endif
264 $ hg --cwd a ci -d '0 0' -A -m 'add executable file in new directory'
289 $ hg --cwd a ci -d '0 0' -A -m 'add executable file in new directory'
265 adding d1/a
290 adding d1/a
266
291
267 $ hg convert -d svn a
292 $ hg convert -d svn a
268 assuming destination a-hg
293 assuming destination a-hg
269 initializing svn repository 'a-hg'
294 initializing svn repository 'a-hg'
270 initializing svn working copy 'a-hg-wc'
295 initializing svn working copy 'a-hg-wc'
271 scanning source...
296 scanning source...
272 sorting...
297 sorting...
273 converting...
298 converting...
274 0 add executable file in new directory
299 0 add executable file in new directory
275 $ svnupanddisplay a-hg-wc 1
300 $ svnupanddisplay a-hg-wc 1
276 1 1 test .
301 1 1 test .
277 1 1 test d1
302 1 1 test d1
278 1 1 test d1/a (glob)
303 1 1 test d1/a (glob)
279 revision: 1
304 revision: 1
280 author: test
305 author: test
281 msg: add executable file in new directory
306 msg: add executable file in new directory
282 A /d1
307 A /d1
283 A /d1/a
308 A /d1/a
284 #if execbit
309 #if execbit
285 $ test -x a-hg-wc/d1/a
310 $ test -x a-hg-wc/d1/a
286 #endif
311 #endif
287
312
288 Copy to new directory
313 Copy to new directory
289
314
290 $ mkdir a/d2
315 $ mkdir a/d2
291 $ hg --cwd a cp d1/a d2/a
316 $ hg --cwd a cp d1/a d2/a
292 $ hg --cwd a ci -d '1 0' -A -m 'copy file to new directory'
317 $ hg --cwd a ci -d '1 0' -A -m 'copy file to new directory'
293
318
294 $ hg convert -d svn a
319 $ hg convert -d svn a
295 assuming destination a-hg
320 assuming destination a-hg
296 initializing svn working copy 'a-hg-wc'
321 initializing svn working copy 'a-hg-wc'
297 scanning source...
322 scanning source...
298 sorting...
323 sorting...
299 converting...
324 converting...
300 0 copy file to new directory
325 0 copy file to new directory
301 $ svnupanddisplay a-hg-wc 1
326 $ svnupanddisplay a-hg-wc 1
302 2 1 test d1
327 2 1 test d1
303 2 1 test d1/a (glob)
328 2 1 test d1/a (glob)
304 2 2 test .
329 2 2 test .
305 2 2 test d2
330 2 2 test d2
306 2 2 test d2/a (glob)
331 2 2 test d2/a (glob)
307 revision: 2
332 revision: 2
308 author: test
333 author: test
309 msg: copy file to new directory
334 msg: copy file to new directory
310 A /d2
335 A /d2
311 A /d2/a (from /d1/a@1)
336 A /d2/a (from /d1/a@1)
312
337
313 Branchy history
338 Branchy history
314
339
315 $ hg init b
340 $ hg init b
316 $ echo base > b/b
341 $ echo base > b/b
317 $ hg --cwd b ci -d '0 0' -Ambase
342 $ hg --cwd b ci -d '0 0' -Ambase
318 adding b
343 adding b
319
344
320 $ "$TESTDIR/svn-safe-append.py" left-1 b/b
345 $ "$TESTDIR/svn-safe-append.py" left-1 b/b
321 $ echo left-1 > b/left-1
346 $ echo left-1 > b/left-1
322 $ hg --cwd b ci -d '1 0' -Amleft-1
347 $ hg --cwd b ci -d '1 0' -Amleft-1
323 adding left-1
348 adding left-1
324
349
325 $ "$TESTDIR/svn-safe-append.py" left-2 b/b
350 $ "$TESTDIR/svn-safe-append.py" left-2 b/b
326 $ echo left-2 > b/left-2
351 $ echo left-2 > b/left-2
327 $ hg --cwd b ci -d '2 0' -Amleft-2
352 $ hg --cwd b ci -d '2 0' -Amleft-2
328 adding left-2
353 adding left-2
329
354
330 $ hg --cwd b up 0
355 $ hg --cwd b up 0
331 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
356 1 files updated, 0 files merged, 2 files removed, 0 files unresolved
332
357
333 $ "$TESTDIR/svn-safe-append.py" right-1 b/b
358 $ "$TESTDIR/svn-safe-append.py" right-1 b/b
334 $ echo right-1 > b/right-1
359 $ echo right-1 > b/right-1
335 $ hg --cwd b ci -d '3 0' -Amright-1
360 $ hg --cwd b ci -d '3 0' -Amright-1
336 adding right-1
361 adding right-1
337 created new head
362 created new head
338
363
339 $ "$TESTDIR/svn-safe-append.py" right-2 b/b
364 $ "$TESTDIR/svn-safe-append.py" right-2 b/b
340 $ echo right-2 > b/right-2
365 $ echo right-2 > b/right-2
341 $ hg --cwd b ci -d '4 0' -Amright-2
366 $ hg --cwd b ci -d '4 0' -Amright-2
342 adding right-2
367 adding right-2
343
368
344 $ hg --cwd b up -C 2
369 $ hg --cwd b up -C 2
345 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
370 3 files updated, 0 files merged, 2 files removed, 0 files unresolved
346 $ hg --cwd b merge
371 $ hg --cwd b merge
347 merging b
372 merging b
348 warning: conflicts during merge.
373 warning: conflicts during merge.
349 merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
374 merging b incomplete! (edit conflicts, then use 'hg resolve --mark')
350 2 files updated, 0 files merged, 0 files removed, 1 files unresolved
375 2 files updated, 0 files merged, 0 files removed, 1 files unresolved
351 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
376 use 'hg resolve' to retry unresolved file merges or 'hg update -C .' to abandon
352 [1]
377 [1]
353 $ hg --cwd b revert -r 2 b
378 $ hg --cwd b revert -r 2 b
354 $ hg --cwd b resolve -m b
379 $ hg --cwd b resolve -m b
355 (no more unresolved files)
380 (no more unresolved files)
356 $ hg --cwd b ci -d '5 0' -m 'merge'
381 $ hg --cwd b ci -d '5 0' -m 'merge'
357
382
358 Expect 4 changes
383 Expect 4 changes
359
384
360 $ hg convert -d svn b
385 $ hg convert -d svn b
361 assuming destination b-hg
386 assuming destination b-hg
362 initializing svn repository 'b-hg'
387 initializing svn repository 'b-hg'
363 initializing svn working copy 'b-hg-wc'
388 initializing svn working copy 'b-hg-wc'
364 scanning source...
389 scanning source...
365 sorting...
390 sorting...
366 converting...
391 converting...
367 5 base
392 5 base
368 4 left-1
393 4 left-1
369 3 left-2
394 3 left-2
370 2 right-1
395 2 right-1
371 1 right-2
396 1 right-2
372 0 merge
397 0 merge
373
398
374 $ svnupanddisplay b-hg-wc 0
399 $ svnupanddisplay b-hg-wc 0
375 4 2 test left-1
400 4 2 test left-1
376 4 3 test b
401 4 3 test b
377 4 3 test left-2
402 4 3 test left-2
378 4 4 test .
403 4 4 test .
379 4 4 test right-1
404 4 4 test right-1
380 4 4 test right-2
405 4 4 test right-2
381 revision: 4
406 revision: 4
382 author: test
407 author: test
383 msg: merge
408 msg: merge
384 A /right-1
409 A /right-1
385 A /right-2
410 A /right-2
386 revision: 3
411 revision: 3
387 author: test
412 author: test
388 msg: left-2
413 msg: left-2
389 M /b
414 M /b
390 A /left-2
415 A /left-2
391 revision: 2
416 revision: 2
392 author: test
417 author: test
393 msg: left-1
418 msg: left-1
394 M /b
419 M /b
395 A /left-1
420 A /left-1
396 revision: 1
421 revision: 1
397 author: test
422 author: test
398 msg: base
423 msg: base
399 A /b
424 A /b
400
425
401 Tags are not supported, but must not break conversion
426 Tags are not supported, but must not break conversion
402
427
403 $ rm -rf a a-hg a-hg-wc
428 $ rm -rf a a-hg a-hg-wc
404 $ hg init a
429 $ hg init a
405 $ echo a > a/a
430 $ echo a > a/a
406 $ hg --cwd a ci -d '0 0' -A -m 'Add file a'
431 $ hg --cwd a ci -d '0 0' -A -m 'Add file a'
407 adding a
432 adding a
408 $ hg --cwd a tag -d '1 0' -m 'Tagged as v1.0' v1.0
433 $ hg --cwd a tag -d '1 0' -m 'Tagged as v1.0' v1.0
409
434
410 $ hg convert -d svn a
435 $ hg convert -d svn a
411 assuming destination a-hg
436 assuming destination a-hg
412 initializing svn repository 'a-hg'
437 initializing svn repository 'a-hg'
413 initializing svn working copy 'a-hg-wc'
438 initializing svn working copy 'a-hg-wc'
414 scanning source...
439 scanning source...
415 sorting...
440 sorting...
416 converting...
441 converting...
417 1 Add file a
442 1 Add file a
418 0 Tagged as v1.0
443 0 Tagged as v1.0
419 writing Subversion tags is not yet implemented
444 writing Subversion tags is not yet implemented
420 $ svnupanddisplay a-hg-wc 2
445 $ svnupanddisplay a-hg-wc 2
421 2 1 test a
446 2 1 test a
422 2 2 test .
447 2 2 test .
423 2 2 test .hgtags
448 2 2 test .hgtags
424 revision: 2
449 revision: 2
425 author: test
450 author: test
426 msg: Tagged as v1.0
451 msg: Tagged as v1.0
427 A /.hgtags
452 A /.hgtags
428 revision: 1
453 revision: 1
429 author: test
454 author: test
430 msg: Add file a
455 msg: Add file a
431 A /a
456 A /a
432 $ rm -rf a a-hg a-hg-wc
457 $ rm -rf a a-hg a-hg-wc
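The svn sink test above drives --full through a "rename c d" filemap. For reference, the filemap lines used throughout these tests take two forms, "exclude PATH" and "rename OLD NEW"; the tiny parser below only illustrates that subset (the real filemap code also understands "include" directives and other details this sketch ignores) and is not the convert extension's implementation.

def parse_filemap(text):
    """Parse only the 'exclude' and 'rename' directives seen in these tests."""
    excludes, renames = set(), {}
    for line in text.splitlines():
        parts = line.split()
        if not parts:
            continue
        if parts[0] == 'exclude' and len(parts) == 2:
            excludes.add(parts[1])
        elif parts[0] == 'rename' and len(parts) == 3:
            renames[parts[1]] = parts[2]
    return excludes, renames

print(parse_filemap("exclude a-only\nrename c d"))
# ({'a-only'}, {'c': 'd'})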
@@ -1,220 +1,241 b''
1 #require svn svn-bindings
1 #require svn svn-bindings
2
2
3 $ cat >> $HGRCPATH <<EOF
3 $ cat >> $HGRCPATH <<EOF
4 > [extensions]
4 > [extensions]
5 > convert =
5 > convert =
6 > [convert]
6 > [convert]
7 > svn.trunk = mytrunk
7 > svn.trunk = mytrunk
8 > EOF
8 > EOF
9
9
10 $ svnadmin create svn-repo
10 $ svnadmin create svn-repo
11 $ SVNREPOPATH=`pwd`/svn-repo
11 $ SVNREPOPATH=`pwd`/svn-repo
12 #if windows
12 #if windows
13 $ SVNREPOURL=file:///`python -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
13 $ SVNREPOURL=file:///`python -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
14 #else
14 #else
15 $ SVNREPOURL=file://`python -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
15 $ SVNREPOURL=file://`python -c "import urllib, sys; sys.stdout.write(urllib.quote(sys.argv[1]))" "$SVNREPOPATH"`
16 #endif
16 #endif
17 $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1
17 $ INVALIDREVISIONID=svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1
18 $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
18 $ VALIDREVISIONID=svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@1
19
19
20 Now test that it works with trunk/tags layout, but no branches yet.
20 Now test that it works with trunk/tags layout, but no branches yet.
21
21
22 Initial svn import
22 Initial svn import
23
23
24 $ mkdir projB
24 $ mkdir projB
25 $ cd projB
25 $ cd projB
26 $ mkdir mytrunk
26 $ mkdir mytrunk
27 $ mkdir tags
27 $ mkdir tags
28 $ cd ..
28 $ cd ..
29
29
30 $ svn import -m "init projB" projB "$SVNREPOURL/proj%20B" | sort
30 $ svn import -m "init projB" projB "$SVNREPOURL/proj%20B" | sort
31
31
32 Adding projB/mytrunk (glob)
32 Adding projB/mytrunk (glob)
33 Adding projB/tags (glob)
33 Adding projB/tags (glob)
34 Committed revision 1.
34 Committed revision 1.
35
35
36 Update svn repository
36 Update svn repository
37
37
38 $ svn co "$SVNREPOURL/proj%20B/mytrunk" B
38 $ svn co "$SVNREPOURL/proj%20B/mytrunk" B
39 Checked out revision 1.
39 Checked out revision 1.
40 $ cd B
40 $ cd B
41 $ echo hello > 'letter .txt'
41 $ echo hello > 'letter .txt'
42 $ svn add 'letter .txt'
42 $ svn add 'letter .txt'
43 A letter .txt
43 A letter .txt
44 $ svn ci -m hello
44 $ svn ci -m hello
45 Adding letter .txt
45 Adding letter .txt
46 Transmitting file data .
46 Transmitting file data .
47 Committed revision 2.
47 Committed revision 2.
48
48
49 $ "$TESTDIR/svn-safe-append.py" world 'letter .txt'
49 $ "$TESTDIR/svn-safe-append.py" world 'letter .txt'
50 $ svn ci -m world
50 $ svn ci -m world
51 Sending letter .txt
51 Sending letter .txt
52 Transmitting file data .
52 Transmitting file data .
53 Committed revision 3.
53 Committed revision 3.
54
54
55 $ svn copy -m "tag v0.1" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.1"
55 $ svn copy -m "tag v0.1" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.1"
56
56
57 Committed revision 4.
57 Committed revision 4.
58
58
59 $ "$TESTDIR/svn-safe-append.py" 'nice day today!' 'letter .txt'
59 $ "$TESTDIR/svn-safe-append.py" 'nice day today!' 'letter .txt'
60 $ svn ci -m "nice day"
60 $ svn ci -m "nice day"
61 Sending letter .txt
61 Sending letter .txt
62 Transmitting file data .
62 Transmitting file data .
63 Committed revision 5.
63 Committed revision 5.
64 $ cd ..
64 $ cd ..
65
65
66 Convert to hg once and also test localtimezone option
66 Convert to hg once and also test localtimezone option
67
67
68 NOTE: This doesn't check all time zones -- it merely determines that
68 NOTE: This doesn't check all time zones -- it merely determines that
69 the configuration option is taking effect.
69 the configuration option is taking effect.
70
70
71 An arbitrary (U.S.) time zone is used here. TZ=US/Hawaii is selected
71 An arbitrary (U.S.) time zone is used here. TZ=US/Hawaii is selected
72 since it does not use DST (unlike other U.S. time zones) and is always
72 since it does not use DST (unlike other U.S. time zones) and is always
73 a fixed difference from UTC.
73 a fixed difference from UTC.
74
74
75 $ TZ=US/Hawaii hg convert --config convert.localtimezone=True "$SVNREPOURL/proj%20B" B-hg
75 $ TZ=US/Hawaii hg convert --config convert.localtimezone=True "$SVNREPOURL/proj%20B" B-hg
76 initializing destination B-hg repository
76 initializing destination B-hg repository
77 scanning source...
77 scanning source...
78 sorting...
78 sorting...
79 converting...
79 converting...
80 3 init projB
80 3 init projB
81 2 hello
81 2 hello
82 1 world
82 1 world
83 0 nice day
83 0 nice day
84 updating tags
84 updating tags
85
85
86 Update svn repository again
86 Update svn repository again
87
87
88 $ cd B
88 $ cd B
89 $ "$TESTDIR/svn-safe-append.py" "see second letter" 'letter .txt'
89 $ "$TESTDIR/svn-safe-append.py" "see second letter" 'letter .txt'
90 $ echo "nice to meet you" > letter2.txt
90 $ echo "nice to meet you" > letter2.txt
91 $ svn add letter2.txt
91 $ svn add letter2.txt
92 A letter2.txt
92 A letter2.txt
93 $ svn ci -m "second letter"
93 $ svn ci -m "second letter"
94 Sending letter .txt
94 Sending letter .txt
95 Adding letter2.txt
95 Adding letter2.txt
96 Transmitting file data ..
96 Transmitting file data ..
97 Committed revision 6.
97 Committed revision 6.
98
98
99 $ svn copy -m "tag v0.2" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.2"
99 $ svn copy -m "tag v0.2" "$SVNREPOURL/proj%20B/mytrunk" "$SVNREPOURL/proj%20B/tags/v0.2"
100
100
101 Committed revision 7.
101 Committed revision 7.
102
102
103 $ "$TESTDIR/svn-safe-append.py" "blah-blah-blah" letter2.txt
103 $ "$TESTDIR/svn-safe-append.py" "blah-blah-blah" letter2.txt
104 $ svn ci -m "work in progress"
104 $ svn ci -m "work in progress"
105 Sending letter2.txt
105 Sending letter2.txt
106 Transmitting file data .
106 Transmitting file data .
107 Committed revision 8.
107 Committed revision 8.
108 $ cd ..
108 $ cd ..
109
109
110 $ hg convert -s svn "$SVNREPOURL/proj%20B/non-existent-path" dest
110 $ hg convert -s svn "$SVNREPOURL/proj%20B/non-existent-path" dest
111 initializing destination dest repository
111 initializing destination dest repository
112 abort: no revision found in module /proj B/non-existent-path
112 abort: no revision found in module /proj B/non-existent-path
113 [255]
113 [255]
114
114
115 ########################################
115 ########################################
116
116
117 Test incremental conversion
117 Test incremental conversion
118
118
119 $ TZ=US/Hawaii hg convert --config convert.localtimezone=True "$SVNREPOURL/proj%20B" B-hg
119 $ TZ=US/Hawaii hg convert --config convert.localtimezone=True "$SVNREPOURL/proj%20B" B-hg
120 scanning source...
120 scanning source...
121 sorting...
121 sorting...
122 converting...
122 converting...
123 1 second letter
123 1 second letter
124 0 work in progress
124 0 work in progress
125 updating tags
125 updating tags
126
126
127 $ cd B-hg
127 $ cd B-hg
128 $ hg log -G --template '{rev} {desc|firstline} date: {date|date} files: {files}\n'
128 $ hg log -G --template '{rev} {desc|firstline} date: {date|date} files: {files}\n'
129 o 7 update tags date: * +0000 files: .hgtags (glob)
129 o 7 update tags date: * +0000 files: .hgtags (glob)
130 |
130 |
131 o 6 work in progress date: * -1000 files: letter2.txt (glob)
131 o 6 work in progress date: * -1000 files: letter2.txt (glob)
132 |
132 |
133 o 5 second letter date: * -1000 files: letter .txt letter2.txt (glob)
133 o 5 second letter date: * -1000 files: letter .txt letter2.txt (glob)
134 |
134 |
135 o 4 update tags date: * +0000 files: .hgtags (glob)
135 o 4 update tags date: * +0000 files: .hgtags (glob)
136 |
136 |
137 o 3 nice day date: * -1000 files: letter .txt (glob)
137 o 3 nice day date: * -1000 files: letter .txt (glob)
138 |
138 |
139 o 2 world date: * -1000 files: letter .txt (glob)
139 o 2 world date: * -1000 files: letter .txt (glob)
140 |
140 |
141 o 1 hello date: * -1000 files: letter .txt (glob)
141 o 1 hello date: * -1000 files: letter .txt (glob)
142 |
142 |
143 o 0 init projB date: * -1000 files: (glob)
143 o 0 init projB date: * -1000 files: (glob)
144
144
145 $ hg tags -q
145 $ hg tags -q
146 tip
146 tip
147 v0.2
147 v0.2
148 v0.1
148 v0.1
149 $ cd ..
149 $ cd ..
150
150
151 Test filemap
151 Test filemap
152 $ echo 'include letter2.txt' > filemap
152 $ echo 'include letter2.txt' > filemap
153 $ hg convert --filemap filemap "$SVNREPOURL/proj%20B/mytrunk" fmap
153 $ hg convert --filemap filemap "$SVNREPOURL/proj%20B/mytrunk" fmap
154 initializing destination fmap repository
154 initializing destination fmap repository
155 scanning source...
155 scanning source...
156 sorting...
156 sorting...
157 converting...
157 converting...
158 5 init projB
158 5 init projB
159 4 hello
159 4 hello
160 3 world
160 3 world
161 2 nice day
161 2 nice day
162 1 second letter
162 1 second letter
163 0 work in progress
163 0 work in progress
164 $ hg -R fmap branch -q
164 $ hg -R fmap branch -q
165 default
165 default
166 $ hg log -G -R fmap --template '{rev} {desc|firstline} files: {files}\n'
166 $ hg log -G -R fmap --template '{rev} {desc|firstline} files: {files}\n'
167 o 1 work in progress files: letter2.txt
167 o 1 work in progress files: letter2.txt
168 |
168 |
169 o 0 second letter files: letter2.txt
169 o 0 second letter files: letter2.txt
170
170
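The filemap used above contains only a single "include" line. For reference, the three directives described in "hg help convert" can be combined freely; a minimal sketch of a richer filemap (all paths are illustrative, not part of the test):

  include src
  exclude src/vendor
  rename src .

With such a map only files under "src" are converted, "src/vendor" is dropped, and the surviving files are moved to the root of the destination repository.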
171 Convert with --full adds and removes files that didn't change
172
173 $ cd B
174 $ echo >> "letter .txt"
175 $ svn ci -m 'nothing'
176 Sending letter .txt
177 Transmitting file data .
178 Committed revision 9.
179 $ cd ..
180
181 $ echo 'rename letter2.txt letter3.txt' > filemap
182 $ hg convert --filemap filemap --full "$SVNREPOURL/proj%20B/mytrunk" fmap
183 scanning source...
184 sorting...
185 converting...
186 0 nothing
187 $ hg -R fmap st --change tip
188 A letter .txt
189 A letter3.txt
190 R letter2.txt
191
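The block above is the intended workflow for the new --full flag: convert once with a filemap, change the filemap, then re-run the incremental conversion with --full so that files which did not change in the source are re-evaluated against the new map. As a sketch with placeholder paths (SOURCE and DEST are illustrative):

  hg convert --filemap filemap SOURCE DEST          # initial conversion
  echo 'exclude docs' >> filemap                    # adjust the filemap afterwards
  hg convert --filemap filemap --full SOURCE DEST   # next changeset applies the change

Files whose content is already correct in DEST are left alone; only the adds, removes and renames implied by the filemap change show up in the newly converted changeset, as the "hg st --change tip" output above demonstrates.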
171 test invalid splicemap1
192 test invalid splicemap1
172
193
173 $ cat > splicemap <<EOF
194 $ cat > splicemap <<EOF
174 > $INVALIDREVISIONID $VALIDREVISIONID
195 > $INVALIDREVISIONID $VALIDREVISIONID
175 > EOF
196 > EOF
176 $ hg convert --splicemap splicemap "$SVNREPOURL/proj%20B/mytrunk" smap
197 $ hg convert --splicemap splicemap "$SVNREPOURL/proj%20B/mytrunk" smap
177 initializing destination smap repository
198 initializing destination smap repository
178 abort: splicemap entry svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1 is not a valid revision identifier
199 abort: splicemap entry svn:x2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk@1 is not a valid revision identifier
179 [255]
200 [255]
180
201
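The entry above is rejected because its key is not a well-formed revision identifier. A valid splicemap line pairs a key in the source ID format with one or two comma-separated parents, as described in the help text; an illustrative (made-up) entry that would give an svn revision a second parent:

  svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@6 svn:a2147622-4a9f-4db4-a8d3-13562ff547b2/proj%20B/mytrunk/mytrunk@5, 1ab2c3d4e5f6

Here the second parent is written as a destination (Mercurial) changeset hash, which the splicemap format also accepts.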
181 Test stop revision
202 Test stop revision
182 $ hg convert --rev 1 "$SVNREPOURL/proj%20B/mytrunk" stoprev
203 $ hg convert --rev 1 "$SVNREPOURL/proj%20B/mytrunk" stoprev
183 initializing destination stoprev repository
204 initializing destination stoprev repository
184 scanning source...
205 scanning source...
185 sorting...
206 sorting...
186 converting...
207 converting...
187 0 init projB
208 0 init projB
188 $ hg -R stoprev branch -q
209 $ hg -R stoprev branch -q
189 default
210 default
190
211
191 Check convert_revision extra-records.
212 Check convert_revision extra-records.
192 This is also the only place testing more than one extra field in a revision.
213 This is also the only place testing more than one extra field in a revision.
193
214
194 $ cd stoprev
215 $ cd stoprev
195 $ hg tip --debug | grep extra
216 $ hg tip --debug | grep extra
196 extra: branch=default
217 extra: branch=default
197 extra: convert_revision=svn:........-....-....-....-............/proj B/mytrunk@1 (re)
218 extra: convert_revision=svn:........-....-....-....-............/proj B/mytrunk@1 (re)
198 $ cd ..
219 $ cd ..
199
220
200 Test converting empty heads (issue3347).
221 Test converting empty heads (issue3347).
201 Also tests getting logs directly without debugsvnlog.
222 Also tests getting logs directly without debugsvnlog.
202
223
203 $ svnadmin create svn-empty
224 $ svnadmin create svn-empty
204 $ svnadmin load -q svn-empty < "$TESTDIR/svn/empty.svndump"
225 $ svnadmin load -q svn-empty < "$TESTDIR/svn/empty.svndump"
205 $ hg --config convert.svn.trunk= --config convert.svn.debugsvnlog=0 convert svn-empty
226 $ hg --config convert.svn.trunk= --config convert.svn.debugsvnlog=0 convert svn-empty
206 assuming destination svn-empty-hg
227 assuming destination svn-empty-hg
207 initializing destination svn-empty-hg repository
228 initializing destination svn-empty-hg repository
208 scanning source...
229 scanning source...
209 sorting...
230 sorting...
210 converting...
231 converting...
211 1 init projA
232 1 init projA
212 0 adddir
233 0 adddir
213 $ hg --config convert.svn.trunk= convert "$SVNREPOURL/../svn-empty/trunk"
234 $ hg --config convert.svn.trunk= convert "$SVNREPOURL/../svn-empty/trunk"
214 assuming destination trunk-hg
235 assuming destination trunk-hg
215 initializing destination trunk-hg repository
236 initializing destination trunk-hg repository
216 scanning source...
237 scanning source...
217 sorting...
238 sorting...
218 converting...
239 converting...
219 1 init projA
240 1 init projA
220 0 adddir
241 0 adddir
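Both conversions above clear convert.svn.trunk so that the given path is treated as a single branch. More generally, a non-standard Subversion layout can be described on the command line; a sketch with hypothetical directory names:

  hg convert --config convert.svn.trunk=mytrunk \
             --config convert.svn.branches=mybranches \
             --config convert.svn.tags=mytags \
             svn://repo/path dest-hg

Leaving any of the three values empty disables auto-detection for that part of the layout, which is exactly what these tests rely on.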
@@ -1,458 +1,466 b''
1 $ cat >> $HGRCPATH <<EOF
1 $ cat >> $HGRCPATH <<EOF
2 > [extensions]
2 > [extensions]
3 > convert=
3 > convert=
4 > [convert]
4 > [convert]
5 > hg.saverev=False
5 > hg.saverev=False
6 > EOF
6 > EOF
7 $ hg help convert
7 $ hg help convert
8 hg convert [OPTION]... SOURCE [DEST [REVMAP]]
8 hg convert [OPTION]... SOURCE [DEST [REVMAP]]
9
9
10 convert a foreign SCM repository to a Mercurial one.
10 convert a foreign SCM repository to a Mercurial one.
11
11
12 Accepted source formats [identifiers]:
12 Accepted source formats [identifiers]:
13
13
14 - Mercurial [hg]
14 - Mercurial [hg]
15 - CVS [cvs]
15 - CVS [cvs]
16 - Darcs [darcs]
16 - Darcs [darcs]
17 - git [git]
17 - git [git]
18 - Subversion [svn]
18 - Subversion [svn]
19 - Monotone [mtn]
19 - Monotone [mtn]
20 - GNU Arch [gnuarch]
20 - GNU Arch [gnuarch]
21 - Bazaar [bzr]
21 - Bazaar [bzr]
22 - Perforce [p4]
22 - Perforce [p4]
23
23
24 Accepted destination formats [identifiers]:
24 Accepted destination formats [identifiers]:
25
25
26 - Mercurial [hg]
26 - Mercurial [hg]
27 - Subversion [svn] (history on branches is not preserved)
27 - Subversion [svn] (history on branches is not preserved)
28
28
29 If no revision is given, all revisions will be converted. Otherwise,
29 If no revision is given, all revisions will be converted. Otherwise,
30 convert will only import up to the named revision (given in a format
30 convert will only import up to the named revision (given in a format
31 understood by the source).
31 understood by the source).
32
32
33 If no destination directory name is specified, it defaults to the basename
33 If no destination directory name is specified, it defaults to the basename
34 of the source with "-hg" appended. If the destination repository doesn't
34 of the source with "-hg" appended. If the destination repository doesn't
35 exist, it will be created.
35 exist, it will be created.
36
36
37 By default, all sources except Mercurial will use --branchsort. Mercurial
37 By default, all sources except Mercurial will use --branchsort. Mercurial
38 uses --sourcesort to preserve original revision numbers order. Sort modes
38 uses --sourcesort to preserve original revision numbers order. Sort modes
39 have the following effects:
39 have the following effects:
40
40
41 --branchsort convert from parent to child revision when possible, which
41 --branchsort convert from parent to child revision when possible, which
42 means branches are usually converted one after the other.
42 means branches are usually converted one after the other.
43 It generates more compact repositories.
43 It generates more compact repositories.
44 --datesort sort revisions by date. Converted repositories have good-
44 --datesort sort revisions by date. Converted repositories have good-
45 looking changelogs but are often an order of magnitude
45 looking changelogs but are often an order of magnitude
46 larger than the same ones generated by --branchsort.
46 larger than the same ones generated by --branchsort.
47 --sourcesort try to preserve source revisions order, only supported by
47 --sourcesort try to preserve source revisions order, only supported by
48 Mercurial sources.
48 Mercurial sources.
49 --closesort try to move closed revisions as close as possible to parent
49 --closesort try to move closed revisions as close as possible to parent
50 branches, only supported by Mercurial sources.
50 branches, only supported by Mercurial sources.
51
51
52 If "REVMAP" isn't given, it will be put in a default location
52 If "REVMAP" isn't given, it will be put in a default location
53 ("<dest>/.hg/shamap" by default). The "REVMAP" is a simple text file that
53 ("<dest>/.hg/shamap" by default). The "REVMAP" is a simple text file that
54 maps each source commit ID to the destination ID for that revision, like
54 maps each source commit ID to the destination ID for that revision, like
55 so:
55 so:
56
56
57 <source ID> <destination ID>
57 <source ID> <destination ID>
58
58
59 If the file doesn't exist, it's automatically created. It's updated on
59 If the file doesn't exist, it's automatically created. It's updated on
60 each commit copied, so "hg convert" can be interrupted and can be run
60 each commit copied, so "hg convert" can be interrupted and can be run
61 repeatedly to copy new commits.
61 repeatedly to copy new commits.
62
62
63 The authormap is a simple text file that maps each source commit author to
63 The authormap is a simple text file that maps each source commit author to
64 a destination commit author. It is handy for source SCMs that use unix
64 a destination commit author. It is handy for source SCMs that use unix
65 logins to identify authors (e.g.: CVS). One line per author mapping and
65 logins to identify authors (e.g.: CVS). One line per author mapping and
66 the line format is:
66 the line format is:
67
67
68 source author = destination author
68 source author = destination author
69
69
70 Empty lines and lines starting with a "#" are ignored.
70 Empty lines and lines starting with a "#" are ignored.
71
71
72 The filemap is a file that allows filtering and remapping of files and
72 The filemap is a file that allows filtering and remapping of files and
73 directories. Each line can contain one of the following directives:
73 directories. Each line can contain one of the following directives:
74
74
75 include path/to/file-or-dir
75 include path/to/file-or-dir
76
76
77 exclude path/to/file-or-dir
77 exclude path/to/file-or-dir
78
78
79 rename path/to/source path/to/destination
79 rename path/to/source path/to/destination
80
80
81 Comment lines start with "#". A specified path matches if it equals the
81 Comment lines start with "#". A specified path matches if it equals the
82 full relative name of a file or one of its parent directories. The
82 full relative name of a file or one of its parent directories. The
83 "include" or "exclude" directive with the longest matching path applies,
83 "include" or "exclude" directive with the longest matching path applies,
84 so line order does not matter.
84 so line order does not matter.
85
85
86 The "include" directive causes a file, or all files under a directory, to
86 The "include" directive causes a file, or all files under a directory, to
87 be included in the destination repository. The default if there are no
87 be included in the destination repository. The default if there are no
88 "include" statements is to include everything. If there are any "include"
88 "include" statements is to include everything. If there are any "include"
89 statements, nothing else is included. The "exclude" directive causes files
89 statements, nothing else is included. The "exclude" directive causes files
90 or directories to be omitted. The "rename" directive renames a file or
90 or directories to be omitted. The "rename" directive renames a file or
91 directory if it is converted. To rename from a subdirectory into the root
91 directory if it is converted. To rename from a subdirectory into the root
92 of the repository, use "." as the path to rename to.
92 of the repository, use "." as the path to rename to.
93
93
94 "--full" will make sure the converted changesets contain exactly the right
95 files with the right content. It will make a full conversion of all files,
96 not just the ones that have changed. Files that already are correct will
97 not be changed. This can be used to apply filemap changes when converting
98 incrementally. This is currently only supported for Mercurial and
99 Subversion.
100
94 The splicemap is a file that allows insertion of synthetic history,
101 The splicemap is a file that allows insertion of synthetic history,
95 letting you specify the parents of a revision. This is useful if you want
102 letting you specify the parents of a revision. This is useful if you want
96 to e.g. give a Subversion merge two parents, or graft two disconnected
103 to e.g. give a Subversion merge two parents, or graft two disconnected
97 series of history together. Each entry contains a key, followed by a
104 series of history together. Each entry contains a key, followed by a
98 space, followed by one or two comma-separated values:
105 space, followed by one or two comma-separated values:
99
106
100 key parent1, parent2
107 key parent1, parent2
101
108
102 The key is the revision ID in the source revision control system whose
109 The key is the revision ID in the source revision control system whose
103 parents should be modified (same format as a key in .hg/shamap). The
110 parents should be modified (same format as a key in .hg/shamap). The
104 values are the revision IDs (in either the source or destination revision
111 values are the revision IDs (in either the source or destination revision
105 control system) that should be used as the new parents for that node. For
112 control system) that should be used as the new parents for that node. For
106 example, if you have merged "release-1.0" into "trunk", then you should
113 example, if you have merged "release-1.0" into "trunk", then you should
107 specify the revision on "trunk" as the first parent and the one on the
114 specify the revision on "trunk" as the first parent and the one on the
108 "release-1.0" branch as the second.
115 "release-1.0" branch as the second.
109
116
110 The branchmap is a file that allows you to rename a branch when it is
117 The branchmap is a file that allows you to rename a branch when it is
111 being brought in from whatever external repository. When used in
118 being brought in from whatever external repository. When used in
112 conjunction with a splicemap, it allows for a powerful combination to help
119 conjunction with a splicemap, it allows for a powerful combination to help
113 fix even the most badly mismanaged repositories and turn them into nicely
120 fix even the most badly mismanaged repositories and turn them into nicely
114 structured Mercurial repositories. The branchmap contains lines of the
121 structured Mercurial repositories. The branchmap contains lines of the
115 form:
122 form:
116
123
117 original_branch_name new_branch_name
124 original_branch_name new_branch_name
118
125
119 where "original_branch_name" is the name of the branch in the source
126 where "original_branch_name" is the name of the branch in the source
120 repository, and "new_branch_name" is the name of the branch in the
127 repository, and "new_branch_name" is the name of the branch in the
121 destination repository. No whitespace is allowed in the branch names. This
128 destination repository. No whitespace is allowed in the branch names. This
122 can be used to (for instance) move code in one repository from "default"
129 can be used to (for instance) move code in one repository from "default"
123 to a named branch.
130 to a named branch.
124
131
125 Mercurial Source
132 Mercurial Source
126 ################
133 ################
127
134
128 The Mercurial source recognizes the following configuration options, which
135 The Mercurial source recognizes the following configuration options, which
129 you can set on the command line with "--config":
136 you can set on the command line with "--config":
130
137
131 convert.hg.ignoreerrors
138 convert.hg.ignoreerrors
132 ignore integrity errors when reading. Use it to fix
139 ignore integrity errors when reading. Use it to fix
133 Mercurial repositories with missing revlogs, by converting
140 Mercurial repositories with missing revlogs, by converting
134 from and to Mercurial. Default is False.
141 from and to Mercurial. Default is False.
135 convert.hg.saverev
142 convert.hg.saverev
136 store original revision ID in changeset (forces target IDs
143 store original revision ID in changeset (forces target IDs
137 to change). It takes a boolean argument and defaults to
144 to change). It takes a boolean argument and defaults to
138 False.
145 False.
139 convert.hg.revs
146 convert.hg.revs
140 revset specifying the source revisions to convert.
147 revset specifying the source revisions to convert.
141
148
142 CVS Source
149 CVS Source
143 ##########
150 ##########
144
151
145 CVS source will use a sandbox (i.e. a checked-out copy) from CVS to
152 CVS source will use a sandbox (i.e. a checked-out copy) from CVS to
146 indicate the starting point of what will be converted. Direct access to
153 indicate the starting point of what will be converted. Direct access to
147 the repository files is not needed, unless of course the repository is
154 the repository files is not needed, unless of course the repository is
148 ":local:". The conversion uses the top level directory in the sandbox to
155 ":local:". The conversion uses the top level directory in the sandbox to
149 find the CVS repository, and then uses CVS rlog commands to find files to
156 find the CVS repository, and then uses CVS rlog commands to find files to
150 convert. This means that unless a filemap is given, all files under the
157 convert. This means that unless a filemap is given, all files under the
151 starting directory will be converted, and that any directory
158 starting directory will be converted, and that any directory
152 reorganization in the CVS sandbox is ignored.
159 reorganization in the CVS sandbox is ignored.
153
160
154 The following options can be used with "--config":
161 The following options can be used with "--config":
155
162
156 convert.cvsps.cache
163 convert.cvsps.cache
157 Set to False to disable remote log caching, for testing and
164 Set to False to disable remote log caching, for testing and
158 debugging purposes. Default is True.
165 debugging purposes. Default is True.
159 convert.cvsps.fuzz
166 convert.cvsps.fuzz
160 Specify the maximum time (in seconds) that is allowed
167 Specify the maximum time (in seconds) that is allowed
161 between commits with identical user and log message in a
168 between commits with identical user and log message in a
162 single changeset. When very large files were checked in as
169 single changeset. When very large files were checked in as
163 part of a changeset then the default may not be long enough.
170 part of a changeset then the default may not be long enough.
164 The default is 60.
171 The default is 60.
165 convert.cvsps.mergeto
172 convert.cvsps.mergeto
166 Specify a regular expression to which commit log messages
173 Specify a regular expression to which commit log messages
167 are matched. If a match occurs, then the conversion process
174 are matched. If a match occurs, then the conversion process
168 will insert a dummy revision merging the branch on which
175 will insert a dummy revision merging the branch on which
169 this log message occurs to the branch indicated in the
176 this log message occurs to the branch indicated in the
170 regex. Default is "{{mergetobranch ([-\w]+)}}"
177 regex. Default is "{{mergetobranch ([-\w]+)}}"
171 convert.cvsps.mergefrom
178 convert.cvsps.mergefrom
172 Specify a regular expression to which commit log messages
179 Specify a regular expression to which commit log messages
173 are matched. If a match occurs, then the conversion process
180 are matched. If a match occurs, then the conversion process
174 will add the most recent revision on the branch indicated in
181 will add the most recent revision on the branch indicated in
175 the regex as the second parent of the changeset. Default is
182 the regex as the second parent of the changeset. Default is
176 "{{mergefrombranch ([-\w]+)}}"
183 "{{mergefrombranch ([-\w]+)}}"
177 convert.localtimezone
184 convert.localtimezone
178 use local time (as determined by the TZ environment
185 use local time (as determined by the TZ environment
179 variable) for changeset date/times. The default is False
186 variable) for changeset date/times. The default is False
180 (use UTC).
187 (use UTC).
181 hooks.cvslog Specify a Python function to be called at the end of
188 hooks.cvslog Specify a Python function to be called at the end of
182 gathering the CVS log. The function is passed a list with
189 gathering the CVS log. The function is passed a list with
183 the log entries, and can modify the entries in-place, or add
190 the log entries, and can modify the entries in-place, or add
184 or delete them.
191 or delete them.
185 hooks.cvschangesets
192 hooks.cvschangesets
186 Specify a Python function to be called after the changesets
193 Specify a Python function to be called after the changesets
187 are calculated from the CVS log. The function is passed a
194 are calculated from the CVS log. The function is passed a
188 list with the changeset entries, and can modify the
195 list with the changeset entries, and can modify the
189 changesets in-place, or add or delete them.
196 changesets in-place, or add or delete them.
190
197
191 An additional "debugcvsps" Mercurial command allows the builtin changeset
198 An additional "debugcvsps" Mercurial command allows the builtin changeset
192 merging code to be run without doing a conversion. Its parameters and
199 merging code to be run without doing a conversion. Its parameters and
193 output are similar to that of cvsps 2.1. Please see the command help for
200 output are similar to that of cvsps 2.1. Please see the command help for
194 more details.
201 more details.
195
202
196 Subversion Source
203 Subversion Source
197 #################
204 #################
198
205
199 Subversion source detects classical trunk/branches/tags layouts. By
206 Subversion source detects classical trunk/branches/tags layouts. By
200 default, the supplied "svn://repo/path/" source URL is converted as a
207 default, the supplied "svn://repo/path/" source URL is converted as a
201 single branch. If "svn://repo/path/trunk" exists it replaces the default
208 single branch. If "svn://repo/path/trunk" exists it replaces the default
202 branch. If "svn://repo/path/branches" exists, its subdirectories are
209 branch. If "svn://repo/path/branches" exists, its subdirectories are
203 listed as possible branches. If "svn://repo/path/tags" exists, it is
210 listed as possible branches. If "svn://repo/path/tags" exists, it is
204 looked for tags referencing converted branches. Default "trunk",
211 looked for tags referencing converted branches. Default "trunk",
205 "branches" and "tags" values can be overridden with following options. Set
212 "branches" and "tags" values can be overridden with following options. Set
206 them to paths relative to the source URL, or leave them blank to disable
213 them to paths relative to the source URL, or leave them blank to disable
207 auto detection.
214 auto detection.
208
215
209 The following options can be set with "--config":
216 The following options can be set with "--config":
210
217
211 convert.svn.branches
218 convert.svn.branches
212 specify the directory containing branches. The default is
219 specify the directory containing branches. The default is
213 "branches".
220 "branches".
214 convert.svn.tags
221 convert.svn.tags
215 specify the directory containing tags. The default is
222 specify the directory containing tags. The default is
216 "tags".
223 "tags".
217 convert.svn.trunk
224 convert.svn.trunk
218 specify the name of the trunk branch. The default is
225 specify the name of the trunk branch. The default is
219 "trunk".
226 "trunk".
220 convert.localtimezone
227 convert.localtimezone
221 use local time (as determined by the TZ environment
228 use local time (as determined by the TZ environment
222 variable) for changeset date/times. The default is False
229 variable) for changeset date/times. The default is False
223 (use UTC).
230 (use UTC).
224
231
225 Source history can be retrieved starting at a specific revision, instead
232 Source history can be retrieved starting at a specific revision, instead
226 of being integrally converted. Only single branch conversions are
233 of being integrally converted. Only single branch conversions are
227 supported.
234 supported.
228
235
229 convert.svn.startrev
236 convert.svn.startrev
230 specify start Subversion revision number. The default is 0.
237 specify start Subversion revision number. The default is 0.
231
238
232 Perforce Source
239 Perforce Source
233 ###############
240 ###############
234
241
235 The Perforce (P4) importer can be given a p4 depot path or a client
242 The Perforce (P4) importer can be given a p4 depot path or a client
236 specification as source. It will convert all files in the source to a flat
243 specification as source. It will convert all files in the source to a flat
237 Mercurial repository, ignoring labels, branches and integrations. Note
244 Mercurial repository, ignoring labels, branches and integrations. Note
238 that when a depot path is given you then usually should specify a target
245 that when a depot path is given you then usually should specify a target
239 directory, because otherwise the target may be named "...-hg".
246 directory, because otherwise the target may be named "...-hg".
240
247
241 It is possible to limit the amount of source history to be converted by
248 It is possible to limit the amount of source history to be converted by
242 specifying an initial Perforce revision:
249 specifying an initial Perforce revision:
243
250
244 convert.p4.startrev
251 convert.p4.startrev
245 specify initial Perforce revision (a Perforce changelist
252 specify initial Perforce revision (a Perforce changelist
246 number).
253 number).
247
254
248 Mercurial Destination
255 Mercurial Destination
249 #####################
256 #####################
250
257
251 The following options are supported:
258 The following options are supported:
252
259
253 convert.hg.clonebranches
260 convert.hg.clonebranches
254 dispatch source branches in separate clones. The default is
261 dispatch source branches in separate clones. The default is
255 False.
262 False.
256 convert.hg.tagsbranch
263 convert.hg.tagsbranch
257 branch name for tag revisions, defaults to "default".
264 branch name for tag revisions, defaults to "default".
258 convert.hg.usebranchnames
265 convert.hg.usebranchnames
259 preserve branch names. The default is True.
266 preserve branch names. The default is True.
260
267
261 options:
268 options:
262
269
263 -s --source-type TYPE source repository type
270 -s --source-type TYPE source repository type
264 -d --dest-type TYPE destination repository type
271 -d --dest-type TYPE destination repository type
265 -r --rev REV import up to source revision REV
272 -r --rev REV import up to source revision REV
266 -A --authormap FILE remap usernames using this file
273 -A --authormap FILE remap usernames using this file
267 --filemap FILE remap file names using contents of file
274 --filemap FILE remap file names using contents of file
275 --full apply filemap changes by converting all files again
268 --splicemap FILE splice synthesized history into place
276 --splicemap FILE splice synthesized history into place
269 --branchmap FILE change branch names while converting
277 --branchmap FILE change branch names while converting
270 --branchsort try to sort changesets by branches
278 --branchsort try to sort changesets by branches
271 --datesort try to sort changesets by date
279 --datesort try to sort changesets by date
272 --sourcesort preserve source changesets order
280 --sourcesort preserve source changesets order
273 --closesort try to reorder closed revisions
281 --closesort try to reorder closed revisions
274
282
275 (some details hidden, use --verbose to show complete help)
283 (some details hidden, use --verbose to show complete help)
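To make the remaining map-file formats from the help text concrete: an authormap contains one "source author = destination author" line per mapping, and a branchmap contains "original_branch_name new_branch_name" pairs with no whitespace inside the names. Minimal illustrative entries (all names invented):

  authormap:   jdoe = John Doe <jdoe@example.com>
  branchmap:   default stable

They are passed with --authormap and --branchmap respectively, just like the --filemap and --splicemap files exercised elsewhere in these tests.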
276 $ hg init a
284 $ hg init a
277 $ cd a
285 $ cd a
278 $ echo a > a
286 $ echo a > a
279 $ hg ci -d'0 0' -Ama
287 $ hg ci -d'0 0' -Ama
280 adding a
288 adding a
281 $ hg cp a b
289 $ hg cp a b
282 $ hg ci -d'1 0' -mb
290 $ hg ci -d'1 0' -mb
283 $ hg rm a
291 $ hg rm a
284 $ hg ci -d'2 0' -mc
292 $ hg ci -d'2 0' -mc
285 $ hg mv b a
293 $ hg mv b a
286 $ hg ci -d'3 0' -md
294 $ hg ci -d'3 0' -md
287 $ echo a >> a
295 $ echo a >> a
288 $ hg ci -d'4 0' -me
296 $ hg ci -d'4 0' -me
289 $ cd ..
297 $ cd ..
290 $ hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded'
298 $ hg convert a 2>&1 | grep -v 'subversion python bindings could not be loaded'
291 assuming destination a-hg
299 assuming destination a-hg
292 initializing destination a-hg repository
300 initializing destination a-hg repository
293 scanning source...
301 scanning source...
294 sorting...
302 sorting...
295 converting...
303 converting...
296 4 a
304 4 a
297 3 b
305 3 b
298 2 c
306 2 c
299 1 d
307 1 d
300 0 e
308 0 e
301 $ hg --cwd a-hg pull ../a
309 $ hg --cwd a-hg pull ../a
302 pulling from ../a
310 pulling from ../a
303 searching for changes
311 searching for changes
304 no changes found
312 no changes found
305
313
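The conversion above records every copied commit in the revmap described in the help text (by default <dest>/.hg/shamap, here a-hg/.hg/shamap), one "<source ID> <destination ID>" line per changeset; that file is what lets "hg convert" be interrupted and re-run incrementally. Inspecting it can help when debugging an incremental conversion:

  cat a-hg/.hg/shamap

Re-running "hg convert a" at this point would find every source changeset already mapped and convert nothing new.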
306 conversion to existing file should fail
314 conversion to existing file should fail
307
315
308 $ touch bogusfile
316 $ touch bogusfile
309 $ hg convert a bogusfile
317 $ hg convert a bogusfile
310 initializing destination bogusfile repository
318 initializing destination bogusfile repository
311 abort: cannot create new bundle repository
319 abort: cannot create new bundle repository
312 [255]
320 [255]
313
321
314 #if unix-permissions no-root
322 #if unix-permissions no-root
315
323
316 conversion to dir without permissions should fail
324 conversion to dir without permissions should fail
317
325
318 $ mkdir bogusdir
326 $ mkdir bogusdir
319 $ chmod 000 bogusdir
327 $ chmod 000 bogusdir
320
328
321 $ hg convert a bogusdir
329 $ hg convert a bogusdir
322 abort: Permission denied: 'bogusdir'
330 abort: Permission denied: 'bogusdir'
323 [255]
331 [255]
324
332
325 user permissions should succeed
333 user permissions should succeed
326
334
327 $ chmod 700 bogusdir
335 $ chmod 700 bogusdir
328 $ hg convert a bogusdir
336 $ hg convert a bogusdir
329 initializing destination bogusdir repository
337 initializing destination bogusdir repository
330 scanning source...
338 scanning source...
331 sorting...
339 sorting...
332 converting...
340 converting...
333 4 a
341 4 a
334 3 b
342 3 b
335 2 c
343 2 c
336 1 d
344 1 d
337 0 e
345 0 e
338
346
339 #endif
347 #endif
340
348
341 test pre and post conversion actions
349 test pre and post conversion actions
342
350
343 $ echo 'include b' > filemap
351 $ echo 'include b' > filemap
344 $ hg convert --debug --filemap filemap a partialb | \
352 $ hg convert --debug --filemap filemap a partialb | \
345 > grep 'run hg'
353 > grep 'run hg'
346 run hg source pre-conversion action
354 run hg source pre-conversion action
347 run hg sink pre-conversion action
355 run hg sink pre-conversion action
348 run hg sink post-conversion action
356 run hg sink post-conversion action
349 run hg source post-conversion action
357 run hg source post-conversion action
350
358
351 converting empty dir should fail nicely
359 converting empty dir should fail nicely
352
360
353 $ mkdir emptydir
361 $ mkdir emptydir
354
362
355 override $PATH to ensure p4 not visible; use $PYTHON in case we're
363 override $PATH to ensure p4 not visible; use $PYTHON in case we're
356 running from a devel copy, not a temp installation
364 running from a devel copy, not a temp installation
357
365
358 $ PATH="$BINDIR" $PYTHON "$BINDIR"/hg convert emptydir
366 $ PATH="$BINDIR" $PYTHON "$BINDIR"/hg convert emptydir
359 assuming destination emptydir-hg
367 assuming destination emptydir-hg
360 initializing destination emptydir-hg repository
368 initializing destination emptydir-hg repository
361 emptydir does not look like a CVS checkout
369 emptydir does not look like a CVS checkout
362 emptydir does not look like a Git repository
370 emptydir does not look like a Git repository
363 emptydir does not look like a Subversion repository
371 emptydir does not look like a Subversion repository
364 emptydir is not a local Mercurial repository
372 emptydir is not a local Mercurial repository
365 emptydir does not look like a darcs repository
373 emptydir does not look like a darcs repository
366 emptydir does not look like a monotone repository
374 emptydir does not look like a monotone repository
367 emptydir does not look like a GNU Arch repository
375 emptydir does not look like a GNU Arch repository
368 emptydir does not look like a Bazaar repository
376 emptydir does not look like a Bazaar repository
369 cannot find required "p4" tool
377 cannot find required "p4" tool
370 abort: emptydir: missing or unsupported repository
378 abort: emptydir: missing or unsupported repository
371 [255]
379 [255]
372
380
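The probe sequence above only runs because no source type was given; naming one with -s/--source-type restricts conversion to that single converter, for example (path illustrative):

  hg convert -s cvs /path/to/cvs-checkout dest-hg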
373 convert with imaginary source type
381 convert with imaginary source type
374
382
375 $ hg convert --source-type foo a a-foo
383 $ hg convert --source-type foo a a-foo
376 initializing destination a-foo repository
384 initializing destination a-foo repository
377 abort: foo: invalid source repository type
385 abort: foo: invalid source repository type
378 [255]
386 [255]
379
387
380 convert with imaginary sink type
388 convert with imaginary sink type
381
389
382 $ hg convert --dest-type foo a a-foo
390 $ hg convert --dest-type foo a a-foo
383 abort: foo: invalid destination repository type
391 abort: foo: invalid destination repository type
384 [255]
392 [255]
385
393
386 testing: convert must not produce duplicate entries in fncache
394 testing: convert must not produce duplicate entries in fncache
387
395
388 $ hg convert a b
396 $ hg convert a b
389 initializing destination b repository
397 initializing destination b repository
390 scanning source...
398 scanning source...
391 sorting...
399 sorting...
392 converting...
400 converting...
393 4 a
401 4 a
394 3 b
402 3 b
395 2 c
403 2 c
396 1 d
404 1 d
397 0 e
405 0 e
398
406
399 contents of fncache file:
407 contents of fncache file:
400
408
401 $ cat b/.hg/store/fncache | sort
409 $ cat b/.hg/store/fncache | sort
402 data/a.i
410 data/a.i
403 data/b.i
411 data/b.i
404
412
405 test bogus URL
413 test bogus URL
406
414
407 $ hg convert -q bzr+ssh://foobar@selenic.com/baz baz
415 $ hg convert -q bzr+ssh://foobar@selenic.com/baz baz
408 abort: bzr+ssh://foobar@selenic.com/baz: missing or unsupported repository
416 abort: bzr+ssh://foobar@selenic.com/baz: missing or unsupported repository
409 [255]
417 [255]
410
418
411 test revset converted() lookup
419 test revset converted() lookup
412
420
413 $ hg --config convert.hg.saverev=True convert a c
421 $ hg --config convert.hg.saverev=True convert a c
414 initializing destination c repository
422 initializing destination c repository
415 scanning source...
423 scanning source...
416 sorting...
424 sorting...
417 converting...
425 converting...
418 4 a
426 4 a
419 3 b
427 3 b
420 2 c
428 2 c
421 1 d
429 1 d
422 0 e
430 0 e
423 $ echo f > c/f
431 $ echo f > c/f
424 $ hg -R c ci -d'0 0' -Amf
432 $ hg -R c ci -d'0 0' -Amf
425 adding f
433 adding f
426 created new head
434 created new head
427 $ hg -R c log -r "converted(09d945a62ce6)"
435 $ hg -R c log -r "converted(09d945a62ce6)"
428 changeset: 1:98c3dd46a874
436 changeset: 1:98c3dd46a874
429 user: test
437 user: test
430 date: Thu Jan 01 00:00:01 1970 +0000
438 date: Thu Jan 01 00:00:01 1970 +0000
431 summary: b
439 summary: b
432
440
433 $ hg -R c log -r "converted()"
441 $ hg -R c log -r "converted()"
434 changeset: 0:31ed57b2037c
442 changeset: 0:31ed57b2037c
435 user: test
443 user: test
436 date: Thu Jan 01 00:00:00 1970 +0000
444 date: Thu Jan 01 00:00:00 1970 +0000
437 summary: a
445 summary: a
438
446
439 changeset: 1:98c3dd46a874
447 changeset: 1:98c3dd46a874
440 user: test
448 user: test
441 date: Thu Jan 01 00:00:01 1970 +0000
449 date: Thu Jan 01 00:00:01 1970 +0000
442 summary: b
450 summary: b
443
451
444 changeset: 2:3b9ca06ef716
452 changeset: 2:3b9ca06ef716
445 user: test
453 user: test
446 date: Thu Jan 01 00:00:02 1970 +0000
454 date: Thu Jan 01 00:00:02 1970 +0000
447 summary: c
455 summary: c
448
456
449 changeset: 3:4e0debd37cf2
457 changeset: 3:4e0debd37cf2
450 user: test
458 user: test
451 date: Thu Jan 01 00:00:03 1970 +0000
459 date: Thu Jan 01 00:00:03 1970 +0000
452 summary: d
460 summary: d
453
461
454 changeset: 4:9de3bc9349c5
462 changeset: 4:9de3bc9349c5
455 user: test
463 user: test
456 date: Thu Jan 01 00:00:04 1970 +0000
464 date: Thu Jan 01 00:00:04 1970 +0000
457 summary: e
465 summary: e
458
466