fastexport: downgrade message about already exported changesets to debug...
Joerg Sonnenberger
r45329:5d309906 stable
@@ -1,219 +1,219 @@
# Copyright 2020 Joerg Sonnenberger <joerg@bec.de>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""export repositories as git fast-import stream"""

# The format specification for fast-import streams can be found at
# https://git-scm.com/docs/git-fast-import#_input_format

from __future__ import absolute_import
import re

from mercurial.i18n import _
from mercurial.node import hex, nullrev
from mercurial.utils import stringutil
from mercurial import (
    error,
    pycompat,
    registrar,
    scmutil,
)
from .convert import convcmd

# Note for extension authors: ONLY specify testedwith = 'ships-with-hg-core' for
# extensions which SHIP WITH MERCURIAL. Non-mainline extensions should
# be specifying the version(s) of Mercurial they are tested with, or
# leave the attribute unspecified.
testedwith = b"ships-with-hg-core"

cmdtable = {}
command = registrar.command(cmdtable)

GIT_PERSON_PROHIBITED = re.compile(b'[<>\n"]')
GIT_EMAIL_PROHIBITED = re.compile(b"[<> \n]")


def convert_to_git_user(authormap, user, rev):
    mapped_user = authormap.get(user, user)
    user_person = stringutil.person(mapped_user)
    user_email = stringutil.email(mapped_user)
    if GIT_EMAIL_PROHIBITED.match(user_email) or GIT_PERSON_PROHIBITED.match(
        user_person
    ):
        raise error.Abort(
            _(b"Unable to parse user into person and email for revision %s")
            % rev
        )
    if user_person:
        return b'"' + user_person + b'" <' + user_email + b'>'
    else:
        return b"<" + user_email + b">"


def convert_to_git_date(date):
    timestamp, utcoff = date
    tzsign = b"+" if utcoff < 0 else b"-"
    if utcoff % 60 != 0:
        raise error.Abort(
            _(b"UTC offset in %b is not an integer number of seconds") % (date,)
        )
    utcoff = abs(utcoff) // 60
    tzh = utcoff // 60
    tzmin = utcoff % 60
    return b"%d " % int(timestamp) + tzsign + b"%02d%02d" % (tzh, tzmin)


def convert_to_git_ref(branch):
    # XXX filter/map depending on git restrictions
    return b"refs/heads/" + branch


def write_data(buf, data, skip_newline):
    buf.append(b"data %d\n" % len(data))
    buf.append(data)
    if not skip_newline or data[-1:] != b"\n":
        buf.append(b"\n")


def export_commit(ui, repo, rev, marks, authormap):
    ctx = repo[rev]
    revid = ctx.hex()
    if revid in marks:
-        ui.warn(_(b"warning: revision %s already exported, skipped\n") % revid)
+        ui.debug(b"warning: revision %s already exported, skipped\n" % revid)
        return
    parents = [p for p in ctx.parents() if p.rev() != nullrev]
    for p in parents:
        if p.hex() not in marks:
            ui.warn(
                _(b"warning: parent %s of %s has not been exported, skipped\n")
                % (p, revid)
            )
            return

    # For all files modified by the commit, check if they have already
    # been exported and otherwise dump the blob with the new mark.
    for fname in ctx.files():
        if fname not in ctx:
            continue
        filectx = ctx.filectx(fname)
        filerev = hex(filectx.filenode())
        if filerev not in marks:
            mark = len(marks) + 1
            marks[filerev] = mark
            data = filectx.data()
            buf = [b"blob\n", b"mark :%d\n" % mark]
            write_data(buf, data, False)
            ui.write(*buf, keepprogressbar=True)
            del buf

    # Assign a mark for the current revision for references by
    # latter merge commits.
    mark = len(marks) + 1
    marks[revid] = mark

    ref = convert_to_git_ref(ctx.branch())
    buf = [
        b"commit %s\n" % ref,
        b"mark :%d\n" % mark,
        b"committer %s %s\n"
        % (
            convert_to_git_user(authormap, ctx.user(), revid),
            convert_to_git_date(ctx.date()),
        ),
    ]
    write_data(buf, ctx.description(), True)
    if parents:
        buf.append(b"from :%d\n" % marks[parents[0].hex()])
    if len(parents) == 2:
        buf.append(b"merge :%d\n" % marks[parents[1].hex()])
        p0ctx = repo[parents[0]]
        files = ctx.manifest().diff(p0ctx.manifest())
    else:
        files = ctx.files()
    filebuf = []
    for fname in files:
        if fname not in ctx:
            filebuf.append((fname, b"D %s\n" % fname))
        else:
            filectx = ctx.filectx(fname)
            filerev = filectx.filenode()
            fileperm = b"755" if filectx.isexec() else b"644"
            changed = b"M %s :%d %s\n" % (fileperm, marks[hex(filerev)], fname)
            filebuf.append((fname, changed))
    filebuf.sort()
    buf.extend(changed for (fname, changed) in filebuf)
    del filebuf
    buf.append(b"\n")
    ui.write(*buf, keepprogressbar=True)
    del buf


isrev = re.compile(b"^[0-9a-f]{40}$")


@command(
    b"fastexport",
    [
        (b"r", b"rev", [], _(b"revisions to export"), _(b"REV")),
        (b"i", b"import-marks", b"", _(b"old marks file to read"), _(b"FILE")),
        (b"e", b"export-marks", b"", _(b"new marks file to write"), _(b"FILE")),
        (
            b"A",
            b"authormap",
            b"",
            _(b"remap usernames using this file"),
            _(b"FILE"),
        ),
    ],
    _(b"[OPTION]... [REV]..."),
    helpcategory=command.CATEGORY_IMPORT_EXPORT,
)
def fastexport(ui, repo, *revs, **opts):
    """export repository as git fast-import stream

    This command lets you dump a repository as a human-readable text stream.
    It can be piped into corresponding import routines like "git fast-import".
    Incremental dumps can be created by using marks files.
    """
    opts = pycompat.byteskwargs(opts)

    revs += tuple(opts.get(b"rev", []))
    if not revs:
        revs = scmutil.revrange(repo, [b":"])
    else:
        revs = scmutil.revrange(repo, revs)
    if not revs:
        raise error.Abort(_(b"no revisions matched"))
    authorfile = opts.get(b"authormap")
    if authorfile:
        authormap = convcmd.readauthormap(ui, authorfile)
    else:
        authormap = {}

    import_marks = opts.get(b"import_marks")
    marks = {}
    if import_marks:
        with open(import_marks, "rb") as import_marks_file:
            for line in import_marks_file:
                line = line.strip()
                if not isrev.match(line) or line in marks:
                    raise error.Abort(_(b"Corrupted marks file"))
                marks[line] = len(marks) + 1

    revs.sort()
    with ui.makeprogress(
        _(b"exporting"), unit=_(b"revisions"), total=len(revs)
    ) as progress:
        for rev in revs:
            export_commit(ui, repo, rev, marks, authormap)
            progress.increment()

    export_marks = opts.get(b"export_marks")
    if export_marks:
        with open(export_marks, "wb") as export_marks_file:
            output_marks = [None] * len(marks)
            for k, v in marks.items():
                output_marks[v - 1] = k
            for k in output_marks:
                export_marks_file.write(k + b"\n")
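
For context on the docstring and the marks handling above, here is a minimal sketch of the incremental workflow the command is meant for. The repository paths, file names, and revset are illustrative only and are not part of this commit; the hg flags (-r/--rev, --import-marks, --export-marks) are the ones declared in the option table above, and the --import-marks/--export-marks options on the git side are standard "git fast-import" options.

    # Initial full export: dump all changesets and record what was written.
    hg fastexport -r "all()" --export-marks hg.marks > full.fi
    git init converted
    git -C converted fast-import --export-marks=../git.marks < full.fi

    # Later incremental export: changesets listed in hg.marks are skipped,
    # and git resolves mark references from its own marks file.
    hg fastexport --import-marks hg.marks --export-marks hg.marks > incr.fi
    git -C converted fast-import --import-marks=../git.marks --export-marks=../git.marks < incr.fi

The marks file read and written here is the simple format handled at the end of fastexport(): one 40-digit hexadecimal Mercurial node (changeset or file revision) per line, with the line number serving as the mark id. With the change in this revision, the "already exported, skipped" notice for changesets found in the imported marks file goes through ui.debug() rather than ui.warn(), so it appears only when debug output is enabled instead of being printed as a warning on every incremental run.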