Show More
@@ -1,571 +1,612 | |||||
1 | # common.py - common code for the convert extension |
|
1 | # common.py - common code for the convert extension | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others |
|
3 | # Copyright 2005-2009 Olivia Mackall <olivia@selenic.com> and others | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import base64 |
|
8 | import base64 | |
9 | import os |
|
9 | import os | |
10 | import pickle |
|
10 | import pickle | |
11 | import re |
|
11 | import re | |
12 | import shlex |
|
12 | import shlex | |
13 | import subprocess |
|
13 | import subprocess | |
|
14 | import typing | |||
|
15 | ||||
|
16 | from typing import ( | |||
|
17 | Any, | |||
|
18 | AnyStr, | |||
|
19 | Optional, | |||
|
20 | ) | |||
14 |
|
21 | |||
15 | from mercurial.i18n import _ |
|
22 | from mercurial.i18n import _ | |
16 | from mercurial.pycompat import open |
|
23 | from mercurial.pycompat import open | |
17 | from mercurial import ( |
|
24 | from mercurial import ( | |
18 | encoding, |
|
25 | encoding, | |
19 | error, |
|
26 | error, | |
20 | phases, |
|
27 | phases, | |
21 | pycompat, |
|
28 | pycompat, | |
22 | util, |
|
29 | util, | |
23 | ) |
|
30 | ) | |
24 | from mercurial.utils import ( |
|
31 | from mercurial.utils import ( | |
25 | dateutil, |
|
32 | dateutil, | |
26 | procutil, |
|
33 | procutil, | |
27 | ) |
|
34 | ) | |
28 |
|
35 | |||
|
36 | if typing.TYPE_CHECKING: | |||
|
37 | from typing import ( | |||
|
38 | overload, | |||
|
39 | ) | |||
|
40 | from mercurial import ( | |||
|
41 | ui as uimod, | |||
|
42 | ) | |||
|
43 | ||||
# Short local alias for Mercurial's cached-property decorator.
propertycache = util.propertycache
30 |
|
45 | |||
31 |
|
46 | |||
|
47 | if typing.TYPE_CHECKING: | |||
|
48 | ||||
|
49 | @overload | |||
|
50 | def _encodeornone(d: str) -> bytes: | |||
|
51 | pass | |||
|
52 | ||||
|
53 | @overload | |||
|
54 | def _encodeornone(d: None) -> None: | |||
|
55 | pass | |||
|
56 | ||||
|
57 | ||||
32 | def _encodeornone(d): |
|
58 | def _encodeornone(d): | |
33 | if d is None: |
|
59 | if d is None: | |
34 | return |
|
60 | return | |
35 | return d.encode('latin1') |
|
61 | return d.encode('latin1') | |
36 |
|
62 | |||
37 |
|
63 | |||
38 | class _shlexpy3proxy: |
|
64 | class _shlexpy3proxy: | |
39 | def __init__(self, l): |
|
65 | def __init__(self, l: shlex.shlex) -> None: | |
40 | self._l = l |
|
66 | self._l = l | |
41 |
|
67 | |||
42 | def __iter__(self): |
|
68 | def __iter__(self): | |
43 | return (_encodeornone(v) for v in self._l) |
|
69 | return (_encodeornone(v) for v in self._l) | |
44 |
|
70 | |||
45 | def get_token(self): |
|
71 | def get_token(self): | |
46 | return _encodeornone(self._l.get_token()) |
|
72 | return _encodeornone(self._l.get_token()) | |
47 |
|
73 | |||
48 | @property |
|
74 | @property | |
49 | def infile(self): |
|
75 | def infile(self): | |
50 | return self._l.infile or b'<unknown>' |
|
76 | return self._l.infile or b'<unknown>' | |
51 |
|
77 | |||
52 | @property |
|
78 | @property | |
53 | def lineno(self): |
|
79 | def lineno(self) -> int: | |
54 | return self._l.lineno |
|
80 | return self._l.lineno | |
55 |
|
81 | |||
56 |
|
82 | |||
def shlexer(
    data=None,
    filepath: Optional[str] = None,
    wordchars: Optional[bytes] = None,
    whitespace: Optional[bytes] = None,
):
    """Build a bytes-oriented shlex lexer.

    Exactly one of *data* (bytes) or *filepath* must be given; passing
    both raises ProgrammingError.  *wordchars*/*whitespace* extend the
    lexer's corresponding character classes (latin-1 bytes).
    """
    if data is None:
        # The file is decoded as latin1 so arbitrary bytes round-trip.
        data = open(filepath, b'r', encoding='latin1')
    else:
        if filepath is not None:
            raise error.ProgrammingError(
                b'shlexer only accepts data or filepath, not both'
            )
        data = data.decode('latin1')
    lexer = shlex.shlex(data, infile=filepath, posix=True)
    if whitespace is not None:
        lexer.whitespace_split = True
        lexer.whitespace += whitespace.decode('latin1')
    if wordchars is not None:
        lexer.wordchars += wordchars.decode('latin1')
    return _shlexpy3proxy(lexer)
73 |
|
104 | |||
74 |
|
105 | |||
def encodeargs(args: Any) -> bytes:
    """Pickle *args* and return it base64-encoded as a single line."""

    def encodearg(s: bytes) -> bytes:
        # encodebytes() wraps its output with newlines; splitlines()[0]
        # maps each newline byte to b'' and leaves payload bytes intact,
        # flattening the result onto one line.
        lines = base64.encodebytes(s)
        lines = [l.splitlines()[0] for l in pycompat.iterbytestr(lines)]
        return b''.join(lines)

    payload = pickle.dumps(args)
    return encodearg(payload)
83 |
|
114 | |||
84 |
|
115 | |||
def decodeargs(s: bytes) -> Any:
    """Inverse of encodeargs(): base64-decode *s*, then unpickle it.

    NOTE(review): pickle.loads is unsafe on untrusted input; this
    presumably only ever sees data produced by encodeargs() within the
    convert extension — confirm before reusing elsewhere.
    """
    return pickle.loads(base64.decodebytes(s))
88 |
|
119 | |||
89 |
|
120 | |||
class MissingTool(Exception):
    """Raised when a required external conversion tool cannot be found."""
92 |
|
123 | |||
93 |
|
124 | |||
def checktool(
    exe: bytes, name: Optional[bytes] = None, abort: bool = True
) -> None:
    """Ensure the external program *exe* can be found on the path.

    *name* is what the error message calls the tool (defaults to *exe*).
    On failure raises error.Abort, or MissingTool when *abort* is False.
    """
    name = name or exe
    if procutil.findexe(exe):
        return
    exc = error.Abort if abort else MissingTool
    raise exc(_(b'cannot find required "%s" tool') % name)
102 |
|
135 | |||
103 |
|
136 | |||
class NoRepo(Exception):
    """Raised when a path is not a valid source or sink repository."""
106 |
|
139 | |||
107 |
|
140 | |||
# Sentinel revision identifier: marks a source revision that was
# deliberately skipped during conversion.
SKIPREV: bytes = b'SKIP'
109 |
|
142 | |||
110 |
|
143 | |||
class commit:
    """Plain value object describing one source revision to convert."""

    def __init__(
        self,
        author: bytes,
        date: bytes,
        desc: bytes,
        parents,
        branch: Optional[bytes] = None,
        rev=None,
        extra=None,
        sortkey=None,
        saverev=True,
        phase: int = phases.draft,
        optparents=None,
        ctx=None,
    ) -> None:
        # Fall back to placeholders so a commit is always well-formed.
        self.author = author or b'unknown'
        self.date = date or b'0 0'
        self.desc = desc
        self.parents = parents  # will be converted and used as parents
        self.optparents = optparents or []  # will be used if already converted
        self.branch = branch
        self.rev = rev
        self.extra = extra or {}
        self.sortkey = sortkey
        self.saverev = saverev
        self.phase = phase
        self.ctx = ctx  # for hg to hg conversions
139 |
|
172 | |||
140 |
|
173 | |||
class converter_source:
    """Conversion source interface"""

    def __init__(
        self,
        ui: "uimod.ui",
        repotype: bytes,
        path: Optional[bytes] = None,
        revs=None,
    ) -> None:
        """Initialize conversion source (or raise NoRepo("message")
        exception if path is not a valid repository)"""
        self.ui = ui
        self.path = path
        self.revs = revs
        self.repotype = repotype

        self.encoding = b'utf-8'

    def checkhexformat(
        self, revstr: bytes, mapname: bytes = b'splicemap'
    ) -> None:
        """Abort unless *revstr* is a 40-character hex revision id, the
        format used by both Mercurial and git."""
        if not re.match(br'[0-9a-fA-F]{40,40}$', revstr):
            raise error.Abort(
                _(b'%s entry %s is not a valid revision identifier')
                % (mapname, revstr)
            )

    def before(self) -> None:
        """Hook run before conversion starts; subclasses may override."""

    def after(self) -> None:
        """Hook run after conversion finishes; subclasses may override."""

    def targetfilebelongstosource(self, targetfilename):
        """Return True if the given target file belongs to the source repo.
        Useful when only a subdirectory of the target belongs to the
        source repo."""
        # For normal full repo converts, this is always True.
        return True

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def getfile(self, name, rev):
        """Return a pair (data, mode) where data is the file content
        as a string and mode one of '', 'x' or 'l'. rev is the
        identifier returned by a previous call to getchanges().
        Data is None if file is missing/deleted in rev.
        """
        raise NotImplementedError

    def getchanges(self, version, full):
        """Returns a tuple of (files, copies, cleanp2).

        files is a sorted list of (filename, id) tuples for all files
        changed between version and its first parent returned by
        getcommit(). If full, all files in that revision is returned.
        id is the source revision id of the file.

        copies is a dictionary of dest: source

        cleanp2 is the set of files filenames that are clean against p2.
        (Files that are clean against p1 are already not in files (unless
        full). This makes it possible to handle p2 clean files similarly.)
        """
        raise NotImplementedError

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError

    def numcommits(self):
        """Return the number of commits in this source, or None if
        unknown."""
        return None

    def gettags(self):
        """Return the tags as a dictionary of name: revision

        Tag names must be UTF-8 strings.
        """
        raise NotImplementedError

    def recode(self, s: AnyStr, encoding: Optional[bytes] = None) -> bytes:
        """Best-effort transcoding of *s* to UTF-8 bytes.

        str input is encoded directly; bytes input is decoded with
        *encoding* (defaulting to self.encoding), then latin-1, and
        finally with replacement characters as a last resort.
        """
        if not encoding:
            encoding = self.encoding or b'utf-8'

        if isinstance(s, str):
            return s.encode("utf-8")
        try:
            return s.decode(pycompat.sysstr(encoding)).encode("utf-8")
        except UnicodeError:
            try:
                return s.decode("latin-1").encode("utf-8")
            except UnicodeError:
                return s.decode(pycompat.sysstr(encoding), "replace").encode(
                    "utf-8"
                )

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev. The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError

    def converted(self, rev, sinkrev) -> None:
        '''Notify the source that a revision has been converted.'''

    def hasnativeorder(self) -> bool:
        """Return true if this source has a meaningful, native revision
        order. For instance, Mercurial revisions are store sequentially
        while there is no such global ordering with Darcs.
        """
        return False

    def hasnativeclose(self) -> bool:
        """Return true if this source has ability to close branch."""
        return False

    def lookuprev(self, rev):
        """If rev is a meaningful revision reference in source, return
        the referenced identifier in the same format used by getcommit().
        return None otherwise.
        """
        return None

    def getbookmarks(self):
        """Return the bookmarks as a dictionary of name: revision

        Bookmark names are to be UTF-8 strings.
        """
        return {}

    def checkrevformat(self, revstr, mapname: bytes = b'splicemap') -> bool:
        """revstr is a string that describes a revision in the given
        source control system. Return true if revstr has correct
        format.
        """
        return True
289 |
|
330 | |||
290 |
|
331 | |||
class converter_sink:
    """Conversion sink (target) interface"""

    def __init__(self, ui: "uimod.ui", repotype: bytes, path: bytes) -> None:
        """Initialize conversion sink (or raise NoRepo("message")
        exception if path is not a valid repository)

        created is a list of paths to remove if a fatal error occurs
        later"""
        self.ui = ui
        self.path = path
        self.created = []
        self.repotype = repotype

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent authors identifiers for each system."""
        return None

    def putcommit(
        self, files, copies, parents, commit, source, revmap, full, cleanp2
    ):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object
        containing at a minimum the author, date, and message for this
        changeset. 'files' is a list of (path, version) tuples,
        'copies' is a dictionary mapping destinations to sources,
        'source' is the source repository, and 'revmap' is a mapfile
        of source revisions to converted revisions. Only getfile() and
        lookuprev() should be called on 'source'. 'full' means that 'files'
        is complete and all other files should be removed.
        'cleanp2' is a set of the filenames that are unchanged from p2
        (only in the common merge case where there two parents).

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError

    def puttags(self, tags):
        """Put tags into sink.

        tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
        Return a pair (tag_revision, tag_parent_revision), or (None, None)
        if nothing was changed.
        """
        raise NotImplementedError

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.
        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change. This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """

    def before(self) -> None:
        """Hook run before conversion starts; subclasses may override."""

    def after(self) -> None:
        """Hook run after conversion finishes; subclasses may override."""

    def putbookmarks(self, bookmarks):
        """Put bookmarks into sink.

        bookmarks: {bookmarkname: sink_rev_id, ...}
        where bookmarkname is an UTF-8 string.
        """

    def hascommitfrommap(self, rev):
        """Return False if a rev mentioned in a filemap is known to not be
        present."""
        raise NotImplementedError

    def hascommitforsplicemap(self, rev):
        """This method is for the special needs for splicemap handling and not
        for general use. Returns True if the sink contains rev, aborts on some
        special cases."""
        raise NotImplementedError
385 |
|
426 | |||
386 |
|
427 | |||
387 | class commandline: |
|
428 | class commandline: | |
388 | def __init__(self, ui, command): |
|
429 | def __init__(self, ui: "uimod.ui", command: bytes) -> None: | |
389 | self.ui = ui |
|
430 | self.ui = ui | |
390 | self.command = command |
|
431 | self.command = command | |
391 |
|
432 | |||
392 | def prerun(self): |
|
433 | def prerun(self) -> None: | |
393 | pass |
|
434 | pass | |
394 |
|
435 | |||
395 | def postrun(self): |
|
436 | def postrun(self) -> None: | |
396 | pass |
|
437 | pass | |
397 |
|
438 | |||
398 | def _cmdline(self, cmd, *args, **kwargs): |
|
439 | def _cmdline(self, cmd: bytes, *args: bytes, **kwargs) -> bytes: | |
399 | kwargs = pycompat.byteskwargs(kwargs) |
|
440 | kwargs = pycompat.byteskwargs(kwargs) | |
400 | cmdline = [self.command, cmd] + list(args) |
|
441 | cmdline = [self.command, cmd] + list(args) | |
401 | for k, v in kwargs.items(): |
|
442 | for k, v in kwargs.items(): | |
402 | if len(k) == 1: |
|
443 | if len(k) == 1: | |
403 | cmdline.append(b'-' + k) |
|
444 | cmdline.append(b'-' + k) | |
404 | else: |
|
445 | else: | |
405 | cmdline.append(b'--' + k.replace(b'_', b'-')) |
|
446 | cmdline.append(b'--' + k.replace(b'_', b'-')) | |
406 | try: |
|
447 | try: | |
407 | if len(k) == 1: |
|
448 | if len(k) == 1: | |
408 | cmdline.append(b'' + v) |
|
449 | cmdline.append(b'' + v) | |
409 | else: |
|
450 | else: | |
410 | cmdline[-1] += b'=' + v |
|
451 | cmdline[-1] += b'=' + v | |
411 | except TypeError: |
|
452 | except TypeError: | |
412 | pass |
|
453 | pass | |
413 | cmdline = [procutil.shellquote(arg) for arg in cmdline] |
|
454 | cmdline = [procutil.shellquote(arg) for arg in cmdline] | |
414 | if not self.ui.debugflag: |
|
455 | if not self.ui.debugflag: | |
415 | cmdline += [b'2>', pycompat.bytestr(os.devnull)] |
|
456 | cmdline += [b'2>', pycompat.bytestr(os.devnull)] | |
416 | cmdline = b' '.join(cmdline) |
|
457 | cmdline = b' '.join(cmdline) | |
417 | return cmdline |
|
458 | return cmdline | |
418 |
|
459 | |||
419 | def _run(self, cmd, *args, **kwargs): |
|
460 | def _run(self, cmd: bytes, *args: bytes, **kwargs): | |
420 | def popen(cmdline): |
|
461 | def popen(cmdline): | |
421 | p = subprocess.Popen( |
|
462 | p = subprocess.Popen( | |
422 | procutil.tonativestr(cmdline), |
|
463 | procutil.tonativestr(cmdline), | |
423 | shell=True, |
|
464 | shell=True, | |
424 | bufsize=-1, |
|
465 | bufsize=-1, | |
425 | close_fds=procutil.closefds, |
|
466 | close_fds=procutil.closefds, | |
426 | stdout=subprocess.PIPE, |
|
467 | stdout=subprocess.PIPE, | |
427 | ) |
|
468 | ) | |
428 | return p |
|
469 | return p | |
429 |
|
470 | |||
430 | return self._dorun(popen, cmd, *args, **kwargs) |
|
471 | return self._dorun(popen, cmd, *args, **kwargs) | |
431 |
|
472 | |||
432 | def _run2(self, cmd, *args, **kwargs): |
|
473 | def _run2(self, cmd: bytes, *args: bytes, **kwargs): | |
433 | return self._dorun(procutil.popen2, cmd, *args, **kwargs) |
|
474 | return self._dorun(procutil.popen2, cmd, *args, **kwargs) | |
434 |
|
475 | |||
435 | def _run3(self, cmd, *args, **kwargs): |
|
476 | def _run3(self, cmd: bytes, *args: bytes, **kwargs): | |
436 | return self._dorun(procutil.popen3, cmd, *args, **kwargs) |
|
477 | return self._dorun(procutil.popen3, cmd, *args, **kwargs) | |
437 |
|
478 | |||
438 | def _dorun(self, openfunc, cmd, *args, **kwargs): |
|
479 | def _dorun(self, openfunc, cmd: bytes, *args: bytes, **kwargs): | |
439 | cmdline = self._cmdline(cmd, *args, **kwargs) |
|
480 | cmdline = self._cmdline(cmd, *args, **kwargs) | |
440 | self.ui.debug(b'running: %s\n' % (cmdline,)) |
|
481 | self.ui.debug(b'running: %s\n' % (cmdline,)) | |
441 | self.prerun() |
|
482 | self.prerun() | |
442 | try: |
|
483 | try: | |
443 | return openfunc(cmdline) |
|
484 | return openfunc(cmdline) | |
444 | finally: |
|
485 | finally: | |
445 | self.postrun() |
|
486 | self.postrun() | |
446 |
|
487 | |||
447 | def run(self, cmd, *args, **kwargs): |
|
488 | def run(self, cmd: bytes, *args: bytes, **kwargs): | |
448 | p = self._run(cmd, *args, **kwargs) |
|
489 | p = self._run(cmd, *args, **kwargs) | |
449 | output = p.communicate()[0] |
|
490 | output = p.communicate()[0] | |
450 | self.ui.debug(output) |
|
491 | self.ui.debug(output) | |
451 | return output, p.returncode |
|
492 | return output, p.returncode | |
452 |
|
493 | |||
453 | def runlines(self, cmd, *args, **kwargs): |
|
494 | def runlines(self, cmd: bytes, *args: bytes, **kwargs): | |
454 | p = self._run(cmd, *args, **kwargs) |
|
495 | p = self._run(cmd, *args, **kwargs) | |
455 | output = p.stdout.readlines() |
|
496 | output = p.stdout.readlines() | |
456 | p.wait() |
|
497 | p.wait() | |
457 | self.ui.debug(b''.join(output)) |
|
498 | self.ui.debug(b''.join(output)) | |
458 | return output, p.returncode |
|
499 | return output, p.returncode | |
459 |
|
500 | |||
460 | def checkexit(self, status, output=b''): |
|
501 | def checkexit(self, status, output: bytes = b'') -> None: | |
461 | if status: |
|
502 | if status: | |
462 | if output: |
|
503 | if output: | |
463 | self.ui.warn(_(b'%s error:\n') % self.command) |
|
504 | self.ui.warn(_(b'%s error:\n') % self.command) | |
464 | self.ui.warn(output) |
|
505 | self.ui.warn(output) | |
465 | msg = procutil.explainexit(status) |
|
506 | msg = procutil.explainexit(status) | |
466 | raise error.Abort(b'%s %s' % (self.command, msg)) |
|
507 | raise error.Abort(b'%s %s' % (self.command, msg)) | |
467 |
|
508 | |||
468 | def run0(self, cmd, *args, **kwargs): |
|
509 | def run0(self, cmd: bytes, *args: bytes, **kwargs): | |
469 | output, status = self.run(cmd, *args, **kwargs) |
|
510 | output, status = self.run(cmd, *args, **kwargs) | |
470 | self.checkexit(status, output) |
|
511 | self.checkexit(status, output) | |
471 | return output |
|
512 | return output | |
472 |
|
513 | |||
473 | def runlines0(self, cmd, *args, **kwargs): |
|
514 | def runlines0(self, cmd: bytes, *args: bytes, **kwargs): | |
474 | output, status = self.runlines(cmd, *args, **kwargs) |
|
515 | output, status = self.runlines(cmd, *args, **kwargs) | |
475 | self.checkexit(status, b''.join(output)) |
|
516 | self.checkexit(status, b''.join(output)) | |
476 | return output |
|
517 | return output | |
477 |
|
518 | |||
478 | @propertycache |
|
519 | @propertycache | |
479 | def argmax(self): |
|
520 | def argmax(self): | |
480 | # POSIX requires at least 4096 bytes for ARG_MAX |
|
521 | # POSIX requires at least 4096 bytes for ARG_MAX | |
481 | argmax = 4096 |
|
522 | argmax = 4096 | |
482 | try: |
|
523 | try: | |
483 | argmax = os.sysconf("SC_ARG_MAX") |
|
524 | argmax = os.sysconf("SC_ARG_MAX") | |
484 | except (AttributeError, ValueError): |
|
525 | except (AttributeError, ValueError): | |
485 | pass |
|
526 | pass | |
486 |
|
527 | |||
487 | # Windows shells impose their own limits on command line length, |
|
528 | # Windows shells impose their own limits on command line length, | |
488 | # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes |
|
529 | # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes | |
489 | # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for |
|
530 | # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for | |
490 | # details about cmd.exe limitations. |
|
531 | # details about cmd.exe limitations. | |
491 |
|
532 | |||
492 | # Since ARG_MAX is for command line _and_ environment, lower our limit |
|
533 | # Since ARG_MAX is for command line _and_ environment, lower our limit | |
493 | # (and make happy Windows shells while doing this). |
|
534 | # (and make happy Windows shells while doing this). | |
494 | return argmax // 2 - 1 |
|
535 | return argmax // 2 - 1 | |
495 |
|
536 | |||
496 | def _limit_arglist(self, arglist, cmd, *args, **kwargs): |
|
537 | def _limit_arglist(self, arglist, cmd: bytes, *args: bytes, **kwargs): | |
497 | cmdlen = len(self._cmdline(cmd, *args, **kwargs)) |
|
538 | cmdlen = len(self._cmdline(cmd, *args, **kwargs)) | |
498 | limit = self.argmax - cmdlen |
|
539 | limit = self.argmax - cmdlen | |
499 | numbytes = 0 |
|
540 | numbytes = 0 | |
500 | fl = [] |
|
541 | fl = [] | |
501 | for fn in arglist: |
|
542 | for fn in arglist: | |
502 | b = len(fn) + 3 |
|
543 | b = len(fn) + 3 | |
503 | if numbytes + b < limit or len(fl) == 0: |
|
544 | if numbytes + b < limit or len(fl) == 0: | |
504 | fl.append(fn) |
|
545 | fl.append(fn) | |
505 | numbytes += b |
|
546 | numbytes += b | |
506 | else: |
|
547 | else: | |
507 | yield fl |
|
548 | yield fl | |
508 | fl = [fn] |
|
549 | fl = [fn] | |
509 | numbytes = b |
|
550 | numbytes = b | |
510 | if fl: |
|
551 | if fl: | |
511 | yield fl |
|
552 | yield fl | |
512 |
|
553 | |||
513 | def xargs(self, arglist, cmd, *args, **kwargs): |
|
554 | def xargs(self, arglist, cmd: bytes, *args: bytes, **kwargs): | |
514 | for l in self._limit_arglist(arglist, cmd, *args, **kwargs): |
|
555 | for l in self._limit_arglist(arglist, cmd, *args, **kwargs): | |
515 | self.run0(cmd, *(list(args) + l), **kwargs) |
|
556 | self.run0(cmd, *(list(args) + l), **kwargs) | |
516 |
|
557 | |||
517 |
|
558 | |||
518 | class mapfile(dict): |
|
559 | class mapfile(dict): | |
519 | def __init__(self, ui, path): |
|
560 | def __init__(self, ui: "uimod.ui", path: bytes) -> None: | |
520 | super(mapfile, self).__init__() |
|
561 | super(mapfile, self).__init__() | |
521 | self.ui = ui |
|
562 | self.ui = ui | |
522 | self.path = path |
|
563 | self.path = path | |
523 | self.fp = None |
|
564 | self.fp = None | |
524 | self.order = [] |
|
565 | self.order = [] | |
525 | self._read() |
|
566 | self._read() | |
526 |
|
567 | |||
527 | def _read(self): |
|
568 | def _read(self) -> None: | |
528 | if not self.path: |
|
569 | if not self.path: | |
529 | return |
|
570 | return | |
530 | try: |
|
571 | try: | |
531 | fp = open(self.path, b'rb') |
|
572 | fp = open(self.path, b'rb') | |
532 | except FileNotFoundError: |
|
573 | except FileNotFoundError: | |
533 | return |
|
574 | return | |
534 | for i, line in enumerate(fp): |
|
575 | for i, line in enumerate(fp): | |
535 | line = line.splitlines()[0].rstrip() |
|
576 | line = line.splitlines()[0].rstrip() | |
536 | if not line: |
|
577 | if not line: | |
537 | # Ignore blank lines |
|
578 | # Ignore blank lines | |
538 | continue |
|
579 | continue | |
539 | try: |
|
580 | try: | |
540 | key, value = line.rsplit(b' ', 1) |
|
581 | key, value = line.rsplit(b' ', 1) | |
541 | except ValueError: |
|
582 | except ValueError: | |
542 | raise error.Abort( |
|
583 | raise error.Abort( | |
543 | _(b'syntax error in %s(%d): key/value pair expected') |
|
584 | _(b'syntax error in %s(%d): key/value pair expected') | |
544 | % (self.path, i + 1) |
|
585 | % (self.path, i + 1) | |
545 | ) |
|
586 | ) | |
546 | if key not in self: |
|
587 | if key not in self: | |
547 | self.order.append(key) |
|
588 | self.order.append(key) | |
548 | super(mapfile, self).__setitem__(key, value) |
|
589 | super(mapfile, self).__setitem__(key, value) | |
549 | fp.close() |
|
590 | fp.close() | |
550 |
|
591 | |||
551 | def __setitem__(self, key, value): |
|
592 | def __setitem__(self, key, value) -> None: | |
552 | if self.fp is None: |
|
593 | if self.fp is None: | |
553 | try: |
|
594 | try: | |
554 | self.fp = open(self.path, b'ab') |
|
595 | self.fp = open(self.path, b'ab') | |
555 | except IOError as err: |
|
596 | except IOError as err: | |
556 | raise error.Abort( |
|
597 | raise error.Abort( | |
557 | _(b'could not open map file %r: %s') |
|
598 | _(b'could not open map file %r: %s') | |
558 | % (self.path, encoding.strtolocal(err.strerror)) |
|
599 | % (self.path, encoding.strtolocal(err.strerror)) | |
559 | ) |
|
600 | ) | |
560 | self.fp.write(util.tonativeeol(b'%s %s\n' % (key, value))) |
|
601 | self.fp.write(util.tonativeeol(b'%s %s\n' % (key, value))) | |
561 | self.fp.flush() |
|
602 | self.fp.flush() | |
562 | super(mapfile, self).__setitem__(key, value) |
|
603 | super(mapfile, self).__setitem__(key, value) | |
563 |
|
604 | |||
564 | def close(self): |
|
605 | def close(self) -> None: | |
565 | if self.fp: |
|
606 | if self.fp: | |
566 | self.fp.close() |
|
607 | self.fp.close() | |
567 | self.fp = None |
|
608 | self.fp = None | |
568 |
|
609 | |||
569 |
|
610 | |||
570 | def makedatetimestamp(t: float) -> dateutil.hgdate: |
|
611 | def makedatetimestamp(t: float) -> dateutil.hgdate: | |
571 | return dateutil.makedate(t) |
|
612 | return dateutil.makedate(t) |
@@ -1,692 +1,711 | |||||
1 | # convcmd - convert extension commands definition |
|
1 | # convcmd - convert extension commands definition | |
2 | # |
|
2 | # | |
3 | # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> |
|
3 | # Copyright 2005-2007 Olivia Mackall <olivia@selenic.com> | |
4 | # |
|
4 | # | |
5 | # This software may be used and distributed according to the terms of the |
|
5 | # This software may be used and distributed according to the terms of the | |
6 | # GNU General Public License version 2 or any later version. |
|
6 | # GNU General Public License version 2 or any later version. | |
7 |
|
7 | |||
8 | import collections |
|
8 | import collections | |
9 | import heapq |
|
9 | import heapq | |
10 | import os |
|
10 | import os | |
11 | import shutil |
|
11 | import shutil | |
|
12 | import typing | |||
|
13 | ||||
|
14 | from typing import ( | |||
|
15 | AnyStr, | |||
|
16 | Mapping, | |||
|
17 | Optional, | |||
|
18 | Union, | |||
|
19 | ) | |||
12 |
|
20 | |||
13 | from mercurial.i18n import _ |
|
21 | from mercurial.i18n import _ | |
14 | from mercurial.pycompat import open |
|
22 | from mercurial.pycompat import open | |
15 | from mercurial import ( |
|
23 | from mercurial import ( | |
16 | encoding, |
|
24 | encoding, | |
17 | error, |
|
25 | error, | |
18 | hg, |
|
26 | hg, | |
19 | pycompat, |
|
27 | pycompat, | |
20 | scmutil, |
|
28 | scmutil, | |
21 | util, |
|
29 | util, | |
22 | ) |
|
30 | ) | |
23 | from mercurial.utils import dateutil |
|
31 | from mercurial.utils import dateutil | |
24 |
|
32 | |||
25 | from . import ( |
|
33 | from . import ( | |
26 | bzr, |
|
34 | bzr, | |
27 | common, |
|
35 | common, | |
28 | cvs, |
|
36 | cvs, | |
29 | darcs, |
|
37 | darcs, | |
30 | filemap, |
|
38 | filemap, | |
31 | git, |
|
39 | git, | |
32 | gnuarch, |
|
40 | gnuarch, | |
33 | hg as hgconvert, |
|
41 | hg as hgconvert, | |
34 | monotone, |
|
42 | monotone, | |
35 | p4, |
|
43 | p4, | |
36 | subversion, |
|
44 | subversion, | |
37 | ) |
|
45 | ) | |
38 |
|
46 | |||
|
47 | if typing.TYPE_CHECKING: | |||
|
48 | from mercurial import ( | |||
|
49 | ui as uimod, | |||
|
50 | ) | |||
|
51 | ||||
39 | mapfile = common.mapfile |
|
52 | mapfile = common.mapfile | |
40 | MissingTool = common.MissingTool |
|
53 | MissingTool = common.MissingTool | |
41 | NoRepo = common.NoRepo |
|
54 | NoRepo = common.NoRepo | |
42 | SKIPREV = common.SKIPREV |
|
55 | SKIPREV = common.SKIPREV | |
43 |
|
56 | |||
44 | bzr_source = bzr.bzr_source |
|
57 | bzr_source = bzr.bzr_source | |
45 | convert_cvs = cvs.convert_cvs |
|
58 | convert_cvs = cvs.convert_cvs | |
46 | convert_git = git.convert_git |
|
59 | convert_git = git.convert_git | |
47 | darcs_source = darcs.darcs_source |
|
60 | darcs_source = darcs.darcs_source | |
48 | gnuarch_source = gnuarch.gnuarch_source |
|
61 | gnuarch_source = gnuarch.gnuarch_source | |
49 | mercurial_sink = hgconvert.mercurial_sink |
|
62 | mercurial_sink = hgconvert.mercurial_sink | |
50 | mercurial_source = hgconvert.mercurial_source |
|
63 | mercurial_source = hgconvert.mercurial_source | |
51 | monotone_source = monotone.monotone_source |
|
64 | monotone_source = monotone.monotone_source | |
52 | p4_source = p4.p4_source |
|
65 | p4_source = p4.p4_source | |
53 | svn_sink = subversion.svn_sink |
|
66 | svn_sink = subversion.svn_sink | |
54 | svn_source = subversion.svn_source |
|
67 | svn_source = subversion.svn_source | |
55 |
|
68 | |||
56 | orig_encoding = b'ascii' |
|
69 | orig_encoding: bytes = b'ascii' | |
57 |
|
70 | |||
58 |
|
71 | |||
59 | def readauthormap(ui, authorfile, authors=None): |
|
72 | def readauthormap(ui: "uimod.ui", authorfile, authors=None): | |
60 | if authors is None: |
|
73 | if authors is None: | |
61 | authors = {} |
|
74 | authors = {} | |
62 | with open(authorfile, b'rb') as afile: |
|
75 | with open(authorfile, b'rb') as afile: | |
63 | for line in afile: |
|
76 | for line in afile: | |
64 |
|
77 | |||
65 | line = line.strip() |
|
78 | line = line.strip() | |
66 | if not line or line.startswith(b'#'): |
|
79 | if not line or line.startswith(b'#'): | |
67 | continue |
|
80 | continue | |
68 |
|
81 | |||
69 | try: |
|
82 | try: | |
70 | srcauthor, dstauthor = line.split(b'=', 1) |
|
83 | srcauthor, dstauthor = line.split(b'=', 1) | |
71 | except ValueError: |
|
84 | except ValueError: | |
72 | msg = _(b'ignoring bad line in author map file %s: %s\n') |
|
85 | msg = _(b'ignoring bad line in author map file %s: %s\n') | |
73 | ui.warn(msg % (authorfile, line.rstrip())) |
|
86 | ui.warn(msg % (authorfile, line.rstrip())) | |
74 | continue |
|
87 | continue | |
75 |
|
88 | |||
76 | srcauthor = srcauthor.strip() |
|
89 | srcauthor = srcauthor.strip() | |
77 | dstauthor = dstauthor.strip() |
|
90 | dstauthor = dstauthor.strip() | |
78 | if authors.get(srcauthor) in (None, dstauthor): |
|
91 | if authors.get(srcauthor) in (None, dstauthor): | |
79 | msg = _(b'mapping author %s to %s\n') |
|
92 | msg = _(b'mapping author %s to %s\n') | |
80 | ui.debug(msg % (srcauthor, dstauthor)) |
|
93 | ui.debug(msg % (srcauthor, dstauthor)) | |
81 | authors[srcauthor] = dstauthor |
|
94 | authors[srcauthor] = dstauthor | |
82 | continue |
|
95 | continue | |
83 |
|
96 | |||
84 | m = _(b'overriding mapping for author %s, was %s, will be %s\n') |
|
97 | m = _(b'overriding mapping for author %s, was %s, will be %s\n') | |
85 | ui.status(m % (srcauthor, authors[srcauthor], dstauthor)) |
|
98 | ui.status(m % (srcauthor, authors[srcauthor], dstauthor)) | |
86 | return authors |
|
99 | return authors | |
87 |
|
100 | |||
88 |
|
101 | |||
89 | def recode(s): |
|
102 | def recode(s: AnyStr) -> bytes: | |
90 | if isinstance(s, str): |
|
103 | if isinstance(s, str): | |
91 | return s.encode(pycompat.sysstr(orig_encoding), 'replace') |
|
104 | return s.encode(pycompat.sysstr(orig_encoding), 'replace') | |
92 | else: |
|
105 | else: | |
93 | return s.decode('utf-8').encode( |
|
106 | return s.decode('utf-8').encode( | |
94 | pycompat.sysstr(orig_encoding), 'replace' |
|
107 | pycompat.sysstr(orig_encoding), 'replace' | |
95 | ) |
|
108 | ) | |
96 |
|
109 | |||
97 |
|
110 | |||
98 | def mapbranch(branch, branchmap): |
|
111 | def mapbranch(branch: bytes, branchmap: Mapping[bytes, bytes]) -> bytes: | |
99 | """ |
|
112 | """ | |
100 | >>> bmap = {b'default': b'branch1'} |
|
113 | >>> bmap = {b'default': b'branch1'} | |
101 | >>> for i in [b'', None]: |
|
114 | >>> for i in [b'', None]: | |
102 | ... mapbranch(i, bmap) |
|
115 | ... mapbranch(i, bmap) | |
103 | 'branch1' |
|
116 | 'branch1' | |
104 | 'branch1' |
|
117 | 'branch1' | |
105 | >>> bmap = {b'None': b'branch2'} |
|
118 | >>> bmap = {b'None': b'branch2'} | |
106 | >>> for i in [b'', None]: |
|
119 | >>> for i in [b'', None]: | |
107 | ... mapbranch(i, bmap) |
|
120 | ... mapbranch(i, bmap) | |
108 | 'branch2' |
|
121 | 'branch2' | |
109 | 'branch2' |
|
122 | 'branch2' | |
110 | >>> bmap = {b'None': b'branch3', b'default': b'branch4'} |
|
123 | >>> bmap = {b'None': b'branch3', b'default': b'branch4'} | |
111 | >>> for i in [b'None', b'', None, b'default', b'branch5']: |
|
124 | >>> for i in [b'None', b'', None, b'default', b'branch5']: | |
112 | ... mapbranch(i, bmap) |
|
125 | ... mapbranch(i, bmap) | |
113 | 'branch3' |
|
126 | 'branch3' | |
114 | 'branch4' |
|
127 | 'branch4' | |
115 | 'branch4' |
|
128 | 'branch4' | |
116 | 'branch4' |
|
129 | 'branch4' | |
117 | 'branch5' |
|
130 | 'branch5' | |
118 | """ |
|
131 | """ | |
119 | # If branch is None or empty, this commit is coming from the source |
|
132 | # If branch is None or empty, this commit is coming from the source | |
120 | # repository's default branch and destined for the default branch in the |
|
133 | # repository's default branch and destined for the default branch in the | |
121 | # destination repository. For such commits, using a literal "default" |
|
134 | # destination repository. For such commits, using a literal "default" | |
122 | # in branchmap below allows the user to map "default" to an alternate |
|
135 | # in branchmap below allows the user to map "default" to an alternate | |
123 | # default branch in the destination repository. |
|
136 | # default branch in the destination repository. | |
124 | branch = branchmap.get(branch or b'default', branch) |
|
137 | branch = branchmap.get(branch or b'default', branch) | |
125 | # At some point we used "None" literal to denote the default branch, |
|
138 | # At some point we used "None" literal to denote the default branch, | |
126 | # attempt to use that for backward compatibility. |
|
139 | # attempt to use that for backward compatibility. | |
127 | if not branch: |
|
140 | if not branch: | |
128 | branch = branchmap.get(b'None', branch) |
|
141 | branch = branchmap.get(b'None', branch) | |
129 | return branch |
|
142 | return branch | |
130 |
|
143 | |||
131 |
|
144 | |||
132 | source_converters = [ |
|
145 | source_converters = [ | |
133 | (b'cvs', convert_cvs, b'branchsort'), |
|
146 | (b'cvs', convert_cvs, b'branchsort'), | |
134 | (b'git', convert_git, b'branchsort'), |
|
147 | (b'git', convert_git, b'branchsort'), | |
135 | (b'svn', svn_source, b'branchsort'), |
|
148 | (b'svn', svn_source, b'branchsort'), | |
136 | (b'hg', mercurial_source, b'sourcesort'), |
|
149 | (b'hg', mercurial_source, b'sourcesort'), | |
137 | (b'darcs', darcs_source, b'branchsort'), |
|
150 | (b'darcs', darcs_source, b'branchsort'), | |
138 | (b'mtn', monotone_source, b'branchsort'), |
|
151 | (b'mtn', monotone_source, b'branchsort'), | |
139 | (b'gnuarch', gnuarch_source, b'branchsort'), |
|
152 | (b'gnuarch', gnuarch_source, b'branchsort'), | |
140 | (b'bzr', bzr_source, b'branchsort'), |
|
153 | (b'bzr', bzr_source, b'branchsort'), | |
141 | (b'p4', p4_source, b'branchsort'), |
|
154 | (b'p4', p4_source, b'branchsort'), | |
142 | ] |
|
155 | ] | |
143 |
|
156 | |||
144 | sink_converters = [ |
|
157 | sink_converters = [ | |
145 | (b'hg', mercurial_sink), |
|
158 | (b'hg', mercurial_sink), | |
146 | (b'svn', svn_sink), |
|
159 | (b'svn', svn_sink), | |
147 | ] |
|
160 | ] | |
148 |
|
161 | |||
149 |
|
162 | |||
150 | def convertsource(ui, path, type, revs): |
|
163 | def convertsource(ui: "uimod.ui", path: bytes, type: bytes, revs): | |
151 | exceptions = [] |
|
164 | exceptions = [] | |
152 | if type and type not in [s[0] for s in source_converters]: |
|
165 | if type and type not in [s[0] for s in source_converters]: | |
153 | raise error.Abort(_(b'%s: invalid source repository type') % type) |
|
166 | raise error.Abort(_(b'%s: invalid source repository type') % type) | |
154 | for name, source, sortmode in source_converters: |
|
167 | for name, source, sortmode in source_converters: | |
155 | try: |
|
168 | try: | |
156 | if not type or name == type: |
|
169 | if not type or name == type: | |
157 | return source(ui, name, path, revs), sortmode |
|
170 | return source(ui, name, path, revs), sortmode | |
158 | except (NoRepo, MissingTool) as inst: |
|
171 | except (NoRepo, MissingTool) as inst: | |
159 | exceptions.append(inst) |
|
172 | exceptions.append(inst) | |
160 | if not ui.quiet: |
|
173 | if not ui.quiet: | |
161 | for inst in exceptions: |
|
174 | for inst in exceptions: | |
162 | ui.write(b"%s\n" % pycompat.bytestr(inst.args[0])) |
|
175 | ui.write(b"%s\n" % pycompat.bytestr(inst.args[0])) | |
163 | raise error.Abort(_(b'%s: missing or unsupported repository') % path) |
|
176 | raise error.Abort(_(b'%s: missing or unsupported repository') % path) | |
164 |
|
177 | |||
165 |
|
178 | |||
166 |
def convertsink( |
|
179 | def convertsink( | |
|
180 | ui: "uimod.ui", path: bytes, type: bytes | |||
|
181 | ) -> Union[hgconvert.mercurial_sink, subversion.svn_sink]: | |||
167 | if type and type not in [s[0] for s in sink_converters]: |
|
182 | if type and type not in [s[0] for s in sink_converters]: | |
168 | raise error.Abort(_(b'%s: invalid destination repository type') % type) |
|
183 | raise error.Abort(_(b'%s: invalid destination repository type') % type) | |
169 | for name, sink in sink_converters: |
|
184 | for name, sink in sink_converters: | |
170 | try: |
|
185 | try: | |
171 | if not type or name == type: |
|
186 | if not type or name == type: | |
172 | return sink(ui, name, path) |
|
187 | return sink(ui, name, path) | |
173 | except NoRepo as inst: |
|
188 | except NoRepo as inst: | |
174 | ui.note(_(b"convert: %s\n") % inst) |
|
189 | ui.note(_(b"convert: %s\n") % inst) | |
175 | except MissingTool as inst: |
|
190 | except MissingTool as inst: | |
176 | raise error.Abort(b'%s\n' % inst) |
|
191 | raise error.Abort(b'%s\n' % inst) | |
177 | raise error.Abort(_(b'%s: unknown repository type') % path) |
|
192 | raise error.Abort(_(b'%s: unknown repository type') % path) | |
178 |
|
193 | |||
179 |
|
194 | |||
180 | class progresssource: |
|
195 | class progresssource: | |
181 | def __init__(self, ui, source, filecount): |
|
196 | def __init__( | |
|
197 | self, ui: "uimod.ui", source, filecount: Optional[int] | |||
|
198 | ) -> None: | |||
182 | self.ui = ui |
|
199 | self.ui = ui | |
183 | self.source = source |
|
200 | self.source = source | |
184 | self.progress = ui.makeprogress( |
|
201 | self.progress = ui.makeprogress( | |
185 | _(b'getting files'), unit=_(b'files'), total=filecount |
|
202 | _(b'getting files'), unit=_(b'files'), total=filecount | |
186 | ) |
|
203 | ) | |
187 |
|
204 | |||
188 | def getfile(self, file, rev): |
|
205 | def getfile(self, file, rev): | |
189 | self.progress.increment(item=file) |
|
206 | self.progress.increment(item=file) | |
190 | return self.source.getfile(file, rev) |
|
207 | return self.source.getfile(file, rev) | |
191 |
|
208 | |||
192 | def targetfilebelongstosource(self, targetfilename): |
|
209 | def targetfilebelongstosource(self, targetfilename): | |
193 | return self.source.targetfilebelongstosource(targetfilename) |
|
210 | return self.source.targetfilebelongstosource(targetfilename) | |
194 |
|
211 | |||
195 | def lookuprev(self, rev): |
|
212 | def lookuprev(self, rev): | |
196 | return self.source.lookuprev(rev) |
|
213 | return self.source.lookuprev(rev) | |
197 |
|
214 | |||
198 | def close(self): |
|
215 | def close(self): | |
199 | self.progress.complete() |
|
216 | self.progress.complete() | |
200 |
|
217 | |||
201 |
|
218 | |||
202 | # Sorters are used by the `toposort` function to maintain a set of revisions |
|
219 | # Sorters are used by the `toposort` function to maintain a set of revisions | |
203 | # which can be converted immediately and pick one |
|
220 | # which can be converted immediately and pick one | |
204 | class branchsorter: |
|
221 | class branchsorter: | |
205 | """If the previously converted revision has a child in the |
|
222 | """If the previously converted revision has a child in the | |
206 | eligible revisions list, pick it. Return the list head |
|
223 | eligible revisions list, pick it. Return the list head | |
207 | otherwise. Branch sort attempts to minimize branch |
|
224 | otherwise. Branch sort attempts to minimize branch | |
208 | switching, which is harmful for Mercurial backend |
|
225 | switching, which is harmful for Mercurial backend | |
209 | compression. |
|
226 | compression. | |
210 | """ |
|
227 | """ | |
211 |
|
228 | |||
212 | def __init__(self, parents): |
|
229 | def __init__(self, parents): | |
213 | self.nodes = [] |
|
230 | self.nodes = [] | |
214 | self.parents = parents |
|
231 | self.parents = parents | |
215 | self.prev = None |
|
232 | self.prev = None | |
216 |
|
233 | |||
217 | def picknext(self): |
|
234 | def picknext(self): | |
218 | next = self.nodes[0] |
|
235 | next = self.nodes[0] | |
219 | for n in self.nodes: |
|
236 | for n in self.nodes: | |
220 | if self.prev in self.parents[n]: |
|
237 | if self.prev in self.parents[n]: | |
221 | next = n |
|
238 | next = n | |
222 | break |
|
239 | break | |
223 | self.prev = next |
|
240 | self.prev = next | |
224 | self.nodes.remove(next) |
|
241 | self.nodes.remove(next) | |
225 | return next |
|
242 | return next | |
226 |
|
243 | |||
227 | def insert(self, node): |
|
244 | def insert(self, node): | |
228 | self.nodes.insert(0, node) |
|
245 | self.nodes.insert(0, node) | |
229 |
|
246 | |||
230 | def __len__(self): |
|
247 | def __len__(self): | |
231 | return self.nodes.__len__() |
|
248 | return self.nodes.__len__() | |
232 |
|
249 | |||
233 |
|
250 | |||
234 | class keysorter: |
|
251 | class keysorter: | |
235 | """Key-based sort, ties broken by insertion order""" |
|
252 | """Key-based sort, ties broken by insertion order""" | |
236 |
|
253 | |||
237 | def __init__(self, keyfn): |
|
254 | def __init__(self, keyfn): | |
238 | self.heap = [] |
|
255 | self.heap = [] | |
239 | self.keyfn = keyfn |
|
256 | self.keyfn = keyfn | |
240 | self.counter = 0 |
|
257 | self.counter = 0 | |
241 |
|
258 | |||
242 | def picknext(self): |
|
259 | def picknext(self): | |
243 | return heapq.heappop(self.heap)[2] |
|
260 | return heapq.heappop(self.heap)[2] | |
244 |
|
261 | |||
245 | def insert(self, node): |
|
262 | def insert(self, node): | |
246 | counter = self.counter |
|
263 | counter = self.counter | |
247 | self.counter = counter + 1 |
|
264 | self.counter = counter + 1 | |
248 | key = self.keyfn(node) |
|
265 | key = self.keyfn(node) | |
249 | heapq.heappush(self.heap, (key, counter, node)) |
|
266 | heapq.heappush(self.heap, (key, counter, node)) | |
250 |
|
267 | |||
251 | def __len__(self): |
|
268 | def __len__(self): | |
252 | return self.heap.__len__() |
|
269 | return self.heap.__len__() | |
253 |
|
270 | |||
254 |
|
271 | |||
255 | class converter: |
|
272 | class converter: | |
256 | def __init__(self, ui, source, dest, revmapfile, opts): |
|
273 | def __init__(self, ui: "uimod.ui", source, dest, revmapfile, opts) -> None: | |
257 |
|
274 | |||
258 | self.source = source |
|
275 | self.source = source | |
259 | self.dest = dest |
|
276 | self.dest = dest | |
260 | self.ui = ui |
|
277 | self.ui = ui | |
261 | self.opts = opts |
|
278 | self.opts = opts | |
262 | self.commitcache = {} |
|
279 | self.commitcache = {} | |
263 | self.authors = {} |
|
280 | self.authors = {} | |
264 | self.authorfile = None |
|
281 | self.authorfile = None | |
265 |
|
282 | |||
266 | # Record converted revisions persistently: maps source revision |
|
283 | # Record converted revisions persistently: maps source revision | |
267 | # ID to target revision ID (both strings). (This is how |
|
284 | # ID to target revision ID (both strings). (This is how | |
268 | # incremental conversions work.) |
|
285 | # incremental conversions work.) | |
269 | self.map = mapfile(ui, revmapfile) |
|
286 | self.map = mapfile(ui, revmapfile) | |
270 |
|
287 | |||
271 | # Read first the dst author map if any |
|
288 | # Read first the dst author map if any | |
272 | authorfile = self.dest.authorfile() |
|
289 | authorfile = self.dest.authorfile() | |
273 | if authorfile and os.path.exists(authorfile): |
|
290 | if authorfile and os.path.exists(authorfile): | |
274 | self.readauthormap(authorfile) |
|
291 | self.readauthormap(authorfile) | |
275 | # Extend/Override with new author map if necessary |
|
292 | # Extend/Override with new author map if necessary | |
276 | if opts.get(b'authormap'): |
|
293 | if opts.get(b'authormap'): | |
277 | self.readauthormap(opts.get(b'authormap')) |
|
294 | self.readauthormap(opts.get(b'authormap')) | |
278 | self.authorfile = self.dest.authorfile() |
|
295 | self.authorfile = self.dest.authorfile() | |
279 |
|
296 | |||
280 | self.splicemap = self.parsesplicemap(opts.get(b'splicemap')) |
|
297 | self.splicemap = self.parsesplicemap(opts.get(b'splicemap')) | |
281 | self.branchmap = mapfile(ui, opts.get(b'branchmap')) |
|
298 | self.branchmap = mapfile(ui, opts.get(b'branchmap')) | |
282 |
|
299 | |||
283 | def parsesplicemap(self, path): |
|
300 | def parsesplicemap(self, path: bytes): | |
284 | """check and validate the splicemap format and |
|
301 | """check and validate the splicemap format and | |
285 | return a child/parents dictionary. |
|
302 | return a child/parents dictionary. | |
286 | Format checking has two parts. |
|
303 | Format checking has two parts. | |
287 | 1. generic format which is same across all source types |
|
304 | 1. generic format which is same across all source types | |
288 | 2. specific format checking which may be different for |
|
305 | 2. specific format checking which may be different for | |
289 | different source type. This logic is implemented in |
|
306 | different source type. This logic is implemented in | |
290 | checkrevformat function in source files like |
|
307 | checkrevformat function in source files like | |
291 | hg.py, subversion.py etc. |
|
308 | hg.py, subversion.py etc. | |
292 | """ |
|
309 | """ | |
293 |
|
310 | |||
294 | if not path: |
|
311 | if not path: | |
295 | return {} |
|
312 | return {} | |
296 | m = {} |
|
313 | m = {} | |
297 | try: |
|
314 | try: | |
298 | fp = open(path, b'rb') |
|
315 | fp = open(path, b'rb') | |
299 | for i, line in enumerate(fp): |
|
316 | for i, line in enumerate(fp): | |
300 | line = line.splitlines()[0].rstrip() |
|
317 | line = line.splitlines()[0].rstrip() | |
301 | if not line: |
|
318 | if not line: | |
302 | # Ignore blank lines |
|
319 | # Ignore blank lines | |
303 | continue |
|
320 | continue | |
304 | # split line |
|
321 | # split line | |
305 | lex = common.shlexer(data=line, whitespace=b',') |
|
322 | lex = common.shlexer(data=line, whitespace=b',') | |
306 | line = list(lex) |
|
323 | line = list(lex) | |
307 | # check number of parents |
|
324 | # check number of parents | |
308 | if not (2 <= len(line) <= 3): |
|
325 | if not (2 <= len(line) <= 3): | |
309 | raise error.Abort( |
|
326 | raise error.Abort( | |
310 | _( |
|
327 | _( | |
311 | b'syntax error in %s(%d): child parent1' |
|
328 | b'syntax error in %s(%d): child parent1' | |
312 | b'[,parent2] expected' |
|
329 | b'[,parent2] expected' | |
313 | ) |
|
330 | ) | |
314 | % (path, i + 1) |
|
331 | % (path, i + 1) | |
315 | ) |
|
332 | ) | |
316 | for part in line: |
|
333 | for part in line: | |
317 | self.source.checkrevformat(part) |
|
334 | self.source.checkrevformat(part) | |
318 | child, p1, p2 = line[0], line[1:2], line[2:] |
|
335 | child, p1, p2 = line[0], line[1:2], line[2:] | |
319 | if p1 == p2: |
|
336 | if p1 == p2: | |
320 | m[child] = p1 |
|
337 | m[child] = p1 | |
321 | else: |
|
338 | else: | |
322 | m[child] = p1 + p2 |
|
339 | m[child] = p1 + p2 | |
323 | # if file does not exist or error reading, exit |
|
340 | # if file does not exist or error reading, exit | |
324 | except IOError: |
|
341 | except IOError: | |
325 | raise error.Abort( |
|
342 | raise error.Abort( | |
326 | _(b'splicemap file not found or error reading %s:') % path |
|
343 | _(b'splicemap file not found or error reading %s:') % path | |
327 | ) |
|
344 | ) | |
328 | return m |
|
345 | return m | |
329 |
|
346 | |||
330 | def walktree(self, heads): |
|
347 | def walktree(self, heads): | |
331 | """Return a mapping that identifies the uncommitted parents of every |
|
348 | """Return a mapping that identifies the uncommitted parents of every | |
332 | uncommitted changeset.""" |
|
349 | uncommitted changeset.""" | |
333 | visit = list(heads) |
|
350 | visit = list(heads) | |
334 | known = set() |
|
351 | known = set() | |
335 | parents = {} |
|
352 | parents = {} | |
336 | numcommits = self.source.numcommits() |
|
353 | numcommits = self.source.numcommits() | |
337 | progress = self.ui.makeprogress( |
|
354 | progress = self.ui.makeprogress( | |
338 | _(b'scanning'), unit=_(b'revisions'), total=numcommits |
|
355 | _(b'scanning'), unit=_(b'revisions'), total=numcommits | |
339 | ) |
|
356 | ) | |
340 | while visit: |
|
357 | while visit: | |
341 | n = visit.pop(0) |
|
358 | n = visit.pop(0) | |
342 | if n in known: |
|
359 | if n in known: | |
343 | continue |
|
360 | continue | |
344 | if n in self.map: |
|
361 | if n in self.map: | |
345 | m = self.map[n] |
|
362 | m = self.map[n] | |
346 | if m == SKIPREV or self.dest.hascommitfrommap(m): |
|
363 | if m == SKIPREV or self.dest.hascommitfrommap(m): | |
347 | continue |
|
364 | continue | |
348 | known.add(n) |
|
365 | known.add(n) | |
349 | progress.update(len(known)) |
|
366 | progress.update(len(known)) | |
350 | commit = self.cachecommit(n) |
|
367 | commit = self.cachecommit(n) | |
351 | parents[n] = [] |
|
368 | parents[n] = [] | |
352 | for p in commit.parents: |
|
369 | for p in commit.parents: | |
353 | parents[n].append(p) |
|
370 | parents[n].append(p) | |
354 | visit.append(p) |
|
371 | visit.append(p) | |
355 | progress.complete() |
|
372 | progress.complete() | |
356 |
|
373 | |||
357 | return parents |
|
374 | return parents | |
358 |
|
375 | |||
359 | def mergesplicemap(self, parents, splicemap): |
|
376 | def mergesplicemap(self, parents, splicemap) -> None: | |
360 | """A splicemap redefines child/parent relationships. Check the |
|
377 | """A splicemap redefines child/parent relationships. Check the | |
361 | map contains valid revision identifiers and merge the new |
|
378 | map contains valid revision identifiers and merge the new | |
362 | links in the source graph. |
|
379 | links in the source graph. | |
363 | """ |
|
380 | """ | |
364 | for c in sorted(splicemap): |
|
381 | for c in sorted(splicemap): | |
365 | if c not in parents: |
|
382 | if c not in parents: | |
366 | if not self.dest.hascommitforsplicemap(self.map.get(c, c)): |
|
383 | if not self.dest.hascommitforsplicemap(self.map.get(c, c)): | |
367 | # Could be in source but not converted during this run |
|
384 | # Could be in source but not converted during this run | |
368 | self.ui.warn( |
|
385 | self.ui.warn( | |
369 | _( |
|
386 | _( | |
370 | b'splice map revision %s is not being ' |
|
387 | b'splice map revision %s is not being ' | |
371 | b'converted, ignoring\n' |
|
388 | b'converted, ignoring\n' | |
372 | ) |
|
389 | ) | |
373 | % c |
|
390 | % c | |
374 | ) |
|
391 | ) | |
375 | continue |
|
392 | continue | |
376 | pc = [] |
|
393 | pc = [] | |
377 | for p in splicemap[c]: |
|
394 | for p in splicemap[c]: | |
378 | # We do not have to wait for nodes already in dest. |
|
395 | # We do not have to wait for nodes already in dest. | |
379 | if self.dest.hascommitforsplicemap(self.map.get(p, p)): |
|
396 | if self.dest.hascommitforsplicemap(self.map.get(p, p)): | |
380 | continue |
|
397 | continue | |
381 | # Parent is not in dest and not being converted, not good |
|
398 | # Parent is not in dest and not being converted, not good | |
382 | if p not in parents: |
|
399 | if p not in parents: | |
383 | raise error.Abort(_(b'unknown splice map parent: %s') % p) |
|
400 | raise error.Abort(_(b'unknown splice map parent: %s') % p) | |
384 | pc.append(p) |
|
401 | pc.append(p) | |
385 | parents[c] = pc |
|
402 | parents[c] = pc | |
386 |
|
403 | |||
387 | def toposort(self, parents, sortmode): |
|
404 | def toposort(self, parents, sortmode): | |
388 | """Return an ordering such that every uncommitted changeset is |
|
405 | """Return an ordering such that every uncommitted changeset is | |
389 | preceded by all its uncommitted ancestors.""" |
|
406 | preceded by all its uncommitted ancestors.""" | |
390 |
|
407 | |||
391 | def mapchildren(parents): |
|
408 | def mapchildren(parents): | |
392 | """Return a (children, roots) tuple where 'children' maps parent |
|
409 | """Return a (children, roots) tuple where 'children' maps parent | |
393 | revision identifiers to children ones, and 'roots' is the list of |
|
410 | revision identifiers to children ones, and 'roots' is the list of | |
394 | revisions without parents. 'parents' must be a mapping of revision |
|
411 | revisions without parents. 'parents' must be a mapping of revision | |
395 | identifier to its parents ones. |
|
412 | identifier to its parents ones. | |
396 | """ |
|
413 | """ | |
397 | visit = collections.deque(sorted(parents)) |
|
414 | visit = collections.deque(sorted(parents)) | |
398 | seen = set() |
|
415 | seen = set() | |
399 | children = {} |
|
416 | children = {} | |
400 | roots = [] |
|
417 | roots = [] | |
401 |
|
418 | |||
402 | while visit: |
|
419 | while visit: | |
403 | n = visit.popleft() |
|
420 | n = visit.popleft() | |
404 | if n in seen: |
|
421 | if n in seen: | |
405 | continue |
|
422 | continue | |
406 | seen.add(n) |
|
423 | seen.add(n) | |
407 | # Ensure that nodes without parents are present in the |
|
424 | # Ensure that nodes without parents are present in the | |
408 | # 'children' mapping. |
|
425 | # 'children' mapping. | |
409 | children.setdefault(n, []) |
|
426 | children.setdefault(n, []) | |
410 | hasparent = False |
|
427 | hasparent = False | |
411 | for p in parents[n]: |
|
428 | for p in parents[n]: | |
412 | if p not in self.map: |
|
429 | if p not in self.map: | |
413 | visit.append(p) |
|
430 | visit.append(p) | |
414 | hasparent = True |
|
431 | hasparent = True | |
415 | children.setdefault(p, []).append(n) |
|
432 | children.setdefault(p, []).append(n) | |
416 | if not hasparent: |
|
433 | if not hasparent: | |
417 | roots.append(n) |
|
434 | roots.append(n) | |
418 |
|
435 | |||
419 | return children, roots |
|
436 | return children, roots | |
420 |
|
437 | |||
421 | def makesourcesorter(): |
|
438 | def makesourcesorter(): | |
422 | """Source specific sort.""" |
|
439 | """Source specific sort.""" | |
423 | keyfn = lambda n: self.commitcache[n].sortkey |
|
440 | keyfn = lambda n: self.commitcache[n].sortkey | |
424 | return keysorter(keyfn) |
|
441 | return keysorter(keyfn) | |
425 |
|
442 | |||
426 | def makeclosesorter(): |
|
443 | def makeclosesorter(): | |
427 | """Close order sort.""" |
|
444 | """Close order sort.""" | |
428 | keyfn = lambda n: ( |
|
445 | keyfn = lambda n: ( | |
429 | b'close' not in self.commitcache[n].extra, |
|
446 | b'close' not in self.commitcache[n].extra, | |
430 | self.commitcache[n].sortkey, |
|
447 | self.commitcache[n].sortkey, | |
431 | ) |
|
448 | ) | |
432 | return keysorter(keyfn) |
|
449 | return keysorter(keyfn) | |
433 |
|
450 | |||
434 | def makedatesorter(): |
|
451 | def makedatesorter(): | |
435 | """Sort revisions by date.""" |
|
452 | """Sort revisions by date.""" | |
436 |
|
453 | |||
437 | def getdate(n): |
|
454 | def getdate(n): | |
438 | commit = self.commitcache[n] |
|
455 | commit = self.commitcache[n] | |
439 | # The other entries are here as tie breaker for stability |
|
456 | # The other entries are here as tie breaker for stability | |
440 | return ( |
|
457 | return ( | |
441 | dateutil.parsedate(commit.date), |
|
458 | dateutil.parsedate(commit.date), | |
442 | commit.rev, |
|
459 | commit.rev, | |
443 | commit.branch, |
|
460 | commit.branch, | |
444 | ) |
|
461 | ) | |
445 |
|
462 | |||
446 | return keysorter(getdate) |
|
463 | return keysorter(getdate) | |
447 |
|
464 | |||
448 | if sortmode == b'branchsort': |
|
465 | if sortmode == b'branchsort': | |
449 | sorter = branchsorter(parents) |
|
466 | sorter = branchsorter(parents) | |
450 | elif sortmode == b'datesort': |
|
467 | elif sortmode == b'datesort': | |
451 | sorter = makedatesorter() |
|
468 | sorter = makedatesorter() | |
452 | elif sortmode == b'sourcesort': |
|
469 | elif sortmode == b'sourcesort': | |
453 | sorter = makesourcesorter() |
|
470 | sorter = makesourcesorter() | |
454 | elif sortmode == b'closesort': |
|
471 | elif sortmode == b'closesort': | |
455 | sorter = makeclosesorter() |
|
472 | sorter = makeclosesorter() | |
456 | else: |
|
473 | else: | |
457 | raise error.Abort(_(b'unknown sort mode: %s') % sortmode) |
|
474 | raise error.Abort(_(b'unknown sort mode: %s') % sortmode) | |
458 |
|
475 | |||
459 | children, roots = mapchildren(parents) |
|
476 | children, roots = mapchildren(parents) | |
460 |
|
477 | |||
461 | for node in roots: |
|
478 | for node in roots: | |
462 | sorter.insert(node) |
|
479 | sorter.insert(node) | |
463 |
|
480 | |||
464 | s = [] |
|
481 | s = [] | |
465 | pendings = {} |
|
482 | pendings = {} | |
466 | while sorter: |
|
483 | while sorter: | |
467 | n = sorter.picknext() |
|
484 | n = sorter.picknext() | |
468 | s.append(n) |
|
485 | s.append(n) | |
469 |
|
486 | |||
470 | # Update dependents list |
|
487 | # Update dependents list | |
471 | for c in children.get(n, []): |
|
488 | for c in children.get(n, []): | |
472 | if c not in pendings: |
|
489 | if c not in pendings: | |
473 | pendings[c] = [p for p in parents[c] if p not in self.map] |
|
490 | pendings[c] = [p for p in parents[c] if p not in self.map] | |
474 | try: |
|
491 | try: | |
475 | pendings[c].remove(n) |
|
492 | pendings[c].remove(n) | |
476 | except ValueError: |
|
493 | except ValueError: | |
477 | raise error.Abort( |
|
494 | raise error.Abort( | |
478 | _(b'cycle detected between %s and %s') |
|
495 | _(b'cycle detected between %s and %s') | |
479 | % (recode(c), recode(n)) |
|
496 | % (recode(c), recode(n)) | |
480 | ) |
|
497 | ) | |
481 | if not pendings[c]: |
|
498 | if not pendings[c]: | |
482 | # Parents are converted, node is eligible |
|
499 | # Parents are converted, node is eligible | |
483 | sorter.insert(c) |
|
500 | sorter.insert(c) | |
484 | pendings[c] = None |
|
501 | pendings[c] = None | |
485 |
|
502 | |||
486 | if len(s) != len(parents): |
|
503 | if len(s) != len(parents): | |
487 | raise error.Abort(_(b"not all revisions were sorted")) |
|
504 | raise error.Abort(_(b"not all revisions were sorted")) | |
488 |
|
505 | |||
489 | return s |
|
506 | return s | |
490 |
|
507 | |||
491 | def writeauthormap(self): |
|
508 | def writeauthormap(self) -> None: | |
492 | authorfile = self.authorfile |
|
509 | authorfile = self.authorfile | |
493 | if authorfile: |
|
510 | if authorfile: | |
494 | self.ui.status(_(b'writing author map file %s\n') % authorfile) |
|
511 | self.ui.status(_(b'writing author map file %s\n') % authorfile) | |
495 | ofile = open(authorfile, b'wb+') |
|
512 | ofile = open(authorfile, b'wb+') | |
496 | for author in self.authors: |
|
513 | for author in self.authors: | |
497 | ofile.write( |
|
514 | ofile.write( | |
498 | util.tonativeeol( |
|
515 | util.tonativeeol( | |
499 | b"%s=%s\n" % (author, self.authors[author]) |
|
516 | b"%s=%s\n" % (author, self.authors[author]) | |
500 | ) |
|
517 | ) | |
501 | ) |
|
518 | ) | |
502 | ofile.close() |
|
519 | ofile.close() | |
503 |
|
520 | |||
504 | def readauthormap(self, authorfile): |
|
521 | def readauthormap(self, authorfile) -> None: | |
505 | self.authors = readauthormap(self.ui, authorfile, self.authors) |
|
522 | self.authors = readauthormap(self.ui, authorfile, self.authors) | |
506 |
|
523 | |||
507 | def cachecommit(self, rev): |
|
524 | def cachecommit(self, rev): | |
508 | commit = self.source.getcommit(rev) |
|
525 | commit = self.source.getcommit(rev) | |
509 | commit.author = self.authors.get(commit.author, commit.author) |
|
526 | commit.author = self.authors.get(commit.author, commit.author) | |
510 | commit.branch = mapbranch(commit.branch, self.branchmap) |
|
527 | commit.branch = mapbranch(commit.branch, self.branchmap) | |
511 | self.commitcache[rev] = commit |
|
528 | self.commitcache[rev] = commit | |
512 | return commit |
|
529 | return commit | |
513 |
|
530 | |||
514 | def copy(self, rev): |
|
531 | def copy(self, rev) -> None: | |
515 | commit = self.commitcache[rev] |
|
532 | commit = self.commitcache[rev] | |
516 | full = self.opts.get(b'full') |
|
533 | full = self.opts.get(b'full') | |
517 | changes = self.source.getchanges(rev, full) |
|
534 | changes = self.source.getchanges(rev, full) | |
518 | if isinstance(changes, bytes): |
|
535 | if isinstance(changes, bytes): | |
519 | if changes == SKIPREV: |
|
536 | if changes == SKIPREV: | |
520 | dest = SKIPREV |
|
537 | dest = SKIPREV | |
521 | else: |
|
538 | else: | |
522 | dest = self.map[changes] |
|
539 | dest = self.map[changes] | |
523 | self.map[rev] = dest |
|
540 | self.map[rev] = dest | |
524 | return |
|
541 | return | |
525 | files, copies, cleanp2 = changes |
|
542 | files, copies, cleanp2 = changes | |
526 | pbranches = [] |
|
543 | pbranches = [] | |
527 | if commit.parents: |
|
544 | if commit.parents: | |
528 | for prev in commit.parents: |
|
545 | for prev in commit.parents: | |
529 | if prev not in self.commitcache: |
|
546 | if prev not in self.commitcache: | |
530 | self.cachecommit(prev) |
|
547 | self.cachecommit(prev) | |
531 | pbranches.append( |
|
548 | pbranches.append( | |
532 | (self.map[prev], self.commitcache[prev].branch) |
|
549 | (self.map[prev], self.commitcache[prev].branch) | |
533 | ) |
|
550 | ) | |
534 | self.dest.setbranch(commit.branch, pbranches) |
|
551 | self.dest.setbranch(commit.branch, pbranches) | |
535 | try: |
|
552 | try: | |
536 | parents = self.splicemap[rev] |
|
553 | parents = self.splicemap[rev] | |
537 | self.ui.status( |
|
554 | self.ui.status( | |
538 | _(b'spliced in %s as parents of %s\n') |
|
555 | _(b'spliced in %s as parents of %s\n') | |
539 | % (_(b' and ').join(parents), rev) |
|
556 | % (_(b' and ').join(parents), rev) | |
540 | ) |
|
557 | ) | |
541 | parents = [self.map.get(p, p) for p in parents] |
|
558 | parents = [self.map.get(p, p) for p in parents] | |
542 | except KeyError: |
|
559 | except KeyError: | |
543 | parents = [b[0] for b in pbranches] |
|
560 | parents = [b[0] for b in pbranches] | |
544 | parents.extend( |
|
561 | parents.extend( | |
545 | self.map[x] for x in commit.optparents if x in self.map |
|
562 | self.map[x] for x in commit.optparents if x in self.map | |
546 | ) |
|
563 | ) | |
547 | if len(pbranches) != 2: |
|
564 | if len(pbranches) != 2: | |
548 | cleanp2 = set() |
|
565 | cleanp2 = set() | |
549 | if len(parents) < 3: |
|
566 | if len(parents) < 3: | |
550 | source = progresssource(self.ui, self.source, len(files)) |
|
567 | source = progresssource(self.ui, self.source, len(files)) | |
551 | else: |
|
568 | else: | |
552 | # For an octopus merge, we end up traversing the list of |
|
569 | # For an octopus merge, we end up traversing the list of | |
553 | # changed files N-1 times. This tweak to the number of |
|
570 | # changed files N-1 times. This tweak to the number of | |
554 | # files makes it so the progress bar doesn't overflow |
|
571 | # files makes it so the progress bar doesn't overflow | |
555 | # itself. |
|
572 | # itself. | |
556 | source = progresssource( |
|
573 | source = progresssource( | |
557 | self.ui, self.source, len(files) * (len(parents) - 1) |
|
574 | self.ui, self.source, len(files) * (len(parents) - 1) | |
558 | ) |
|
575 | ) | |
559 | newnode = self.dest.putcommit( |
|
576 | newnode = self.dest.putcommit( | |
560 | files, copies, parents, commit, source, self.map, full, cleanp2 |
|
577 | files, copies, parents, commit, source, self.map, full, cleanp2 | |
561 | ) |
|
578 | ) | |
562 | source.close() |
|
579 | source.close() | |
563 | self.source.converted(rev, newnode) |
|
580 | self.source.converted(rev, newnode) | |
564 | self.map[rev] = newnode |
|
581 | self.map[rev] = newnode | |
565 |
|
582 | |||
566 | def convert(self, sortmode): |
|
583 | def convert(self, sortmode) -> None: | |
567 | try: |
|
584 | try: | |
568 | self.source.before() |
|
585 | self.source.before() | |
569 | self.dest.before() |
|
586 | self.dest.before() | |
570 | self.source.setrevmap(self.map) |
|
587 | self.source.setrevmap(self.map) | |
571 | self.ui.status(_(b"scanning source...\n")) |
|
588 | self.ui.status(_(b"scanning source...\n")) | |
572 | heads = self.source.getheads() |
|
589 | heads = self.source.getheads() | |
573 | parents = self.walktree(heads) |
|
590 | parents = self.walktree(heads) | |
574 | self.mergesplicemap(parents, self.splicemap) |
|
591 | self.mergesplicemap(parents, self.splicemap) | |
575 | self.ui.status(_(b"sorting...\n")) |
|
592 | self.ui.status(_(b"sorting...\n")) | |
576 | t = self.toposort(parents, sortmode) |
|
593 | t = self.toposort(parents, sortmode) | |
577 | num = len(t) |
|
594 | num = len(t) | |
578 | c = None |
|
595 | c = None | |
579 |
|
596 | |||
580 | self.ui.status(_(b"converting...\n")) |
|
597 | self.ui.status(_(b"converting...\n")) | |
581 | progress = self.ui.makeprogress( |
|
598 | progress = self.ui.makeprogress( | |
582 | _(b'converting'), unit=_(b'revisions'), total=len(t) |
|
599 | _(b'converting'), unit=_(b'revisions'), total=len(t) | |
583 | ) |
|
600 | ) | |
584 | for i, c in enumerate(t): |
|
601 | for i, c in enumerate(t): | |
585 | num -= 1 |
|
602 | num -= 1 | |
586 | desc = self.commitcache[c].desc |
|
603 | desc = self.commitcache[c].desc | |
587 | if b"\n" in desc: |
|
604 | if b"\n" in desc: | |
588 | desc = desc.splitlines()[0] |
|
605 | desc = desc.splitlines()[0] | |
589 | # convert log message to local encoding without using |
|
606 | # convert log message to local encoding without using | |
590 | # tolocal() because the encoding.encoding convert() |
|
607 | # tolocal() because the encoding.encoding convert() | |
591 | # uses is 'utf-8' |
|
608 | # uses is 'utf-8' | |
592 | self.ui.status(b"%d %s\n" % (num, recode(desc))) |
|
609 | self.ui.status(b"%d %s\n" % (num, recode(desc))) | |
593 | self.ui.note(_(b"source: %s\n") % recode(c)) |
|
610 | self.ui.note(_(b"source: %s\n") % recode(c)) | |
594 | progress.update(i) |
|
611 | progress.update(i) | |
595 | self.copy(c) |
|
612 | self.copy(c) | |
596 | progress.complete() |
|
613 | progress.complete() | |
597 |
|
614 | |||
598 | if not self.ui.configbool(b'convert', b'skiptags'): |
|
615 | if not self.ui.configbool(b'convert', b'skiptags'): | |
599 | tags = self.source.gettags() |
|
616 | tags = self.source.gettags() | |
600 | ctags = {} |
|
617 | ctags = {} | |
601 | for k in tags: |
|
618 | for k in tags: | |
602 | v = tags[k] |
|
619 | v = tags[k] | |
603 | if self.map.get(v, SKIPREV) != SKIPREV: |
|
620 | if self.map.get(v, SKIPREV) != SKIPREV: | |
604 | ctags[k] = self.map[v] |
|
621 | ctags[k] = self.map[v] | |
605 |
|
622 | |||
606 | if c and ctags: |
|
623 | if c and ctags: | |
607 | nrev, tagsparent = self.dest.puttags(ctags) |
|
624 | nrev, tagsparent = self.dest.puttags(ctags) | |
608 | if nrev and tagsparent: |
|
625 | if nrev and tagsparent: | |
609 | # write another hash correspondence to override the |
|
626 | # write another hash correspondence to override the | |
610 | # previous one so we don't end up with extra tag heads |
|
627 | # previous one so we don't end up with extra tag heads | |
611 | tagsparents = [ |
|
628 | tagsparents = [ | |
612 | e for e in self.map.items() if e[1] == tagsparent |
|
629 | e for e in self.map.items() if e[1] == tagsparent | |
613 | ] |
|
630 | ] | |
614 | if tagsparents: |
|
631 | if tagsparents: | |
615 | self.map[tagsparents[0][0]] = nrev |
|
632 | self.map[tagsparents[0][0]] = nrev | |
616 |
|
633 | |||
617 | bookmarks = self.source.getbookmarks() |
|
634 | bookmarks = self.source.getbookmarks() | |
618 | cbookmarks = {} |
|
635 | cbookmarks = {} | |
619 | for k in bookmarks: |
|
636 | for k in bookmarks: | |
620 | v = bookmarks[k] |
|
637 | v = bookmarks[k] | |
621 | if self.map.get(v, SKIPREV) != SKIPREV: |
|
638 | if self.map.get(v, SKIPREV) != SKIPREV: | |
622 | cbookmarks[k] = self.map[v] |
|
639 | cbookmarks[k] = self.map[v] | |
623 |
|
640 | |||
624 | if c and cbookmarks: |
|
641 | if c and cbookmarks: | |
625 | self.dest.putbookmarks(cbookmarks) |
|
642 | self.dest.putbookmarks(cbookmarks) | |
626 |
|
643 | |||
627 | self.writeauthormap() |
|
644 | self.writeauthormap() | |
628 | finally: |
|
645 | finally: | |
629 | self.cleanup() |
|
646 | self.cleanup() | |
630 |
|
647 | |||
631 | def cleanup(self): |
|
648 | def cleanup(self) -> None: | |
632 | try: |
|
649 | try: | |
633 | self.dest.after() |
|
650 | self.dest.after() | |
634 | finally: |
|
651 | finally: | |
635 | self.source.after() |
|
652 | self.source.after() | |
636 | self.map.close() |
|
653 | self.map.close() | |
637 |
|
654 | |||
638 |
|
655 | |||
639 | def convert(ui, src, dest=None, revmapfile=None, **opts): |
|
656 | def convert( | |
|
657 | ui: "uimod.ui", src, dest: Optional[bytes] = None, revmapfile=None, **opts | |||
|
658 | ) -> None: | |||
640 | opts = pycompat.byteskwargs(opts) |
|
659 | opts = pycompat.byteskwargs(opts) | |
641 | global orig_encoding |
|
660 | global orig_encoding | |
642 | orig_encoding = encoding.encoding |
|
661 | orig_encoding = encoding.encoding | |
643 | encoding.encoding = b'UTF-8' |
|
662 | encoding.encoding = b'UTF-8' | |
644 |
|
663 | |||
645 | # support --authors as an alias for --authormap |
|
664 | # support --authors as an alias for --authormap | |
646 | if not opts.get(b'authormap'): |
|
665 | if not opts.get(b'authormap'): | |
647 | opts[b'authormap'] = opts.get(b'authors') |
|
666 | opts[b'authormap'] = opts.get(b'authors') | |
648 |
|
667 | |||
649 | if not dest: |
|
668 | if not dest: | |
650 | dest = hg.defaultdest(src) + b"-hg" |
|
669 | dest = hg.defaultdest(src) + b"-hg" | |
651 | ui.status(_(b"assuming destination %s\n") % dest) |
|
670 | ui.status(_(b"assuming destination %s\n") % dest) | |
652 |
|
671 | |||
653 | destc = convertsink(ui, dest, opts.get(b'dest_type')) |
|
672 | destc = convertsink(ui, dest, opts.get(b'dest_type')) | |
654 | destc = scmutil.wrapconvertsink(destc) |
|
673 | destc = scmutil.wrapconvertsink(destc) | |
655 |
|
674 | |||
656 | try: |
|
675 | try: | |
657 | srcc, defaultsort = convertsource( |
|
676 | srcc, defaultsort = convertsource( | |
658 | ui, src, opts.get(b'source_type'), opts.get(b'rev') |
|
677 | ui, src, opts.get(b'source_type'), opts.get(b'rev') | |
659 | ) |
|
678 | ) | |
660 | except Exception: |
|
679 | except Exception: | |
661 | for path in destc.created: |
|
680 | for path in destc.created: | |
662 | shutil.rmtree(path, True) |
|
681 | shutil.rmtree(path, True) | |
663 | raise |
|
682 | raise | |
664 |
|
683 | |||
665 | sortmodes = (b'branchsort', b'datesort', b'sourcesort', b'closesort') |
|
684 | sortmodes = (b'branchsort', b'datesort', b'sourcesort', b'closesort') | |
666 | sortmode = [m for m in sortmodes if opts.get(m)] |
|
685 | sortmode = [m for m in sortmodes if opts.get(m)] | |
667 | if len(sortmode) > 1: |
|
686 | if len(sortmode) > 1: | |
668 | raise error.Abort(_(b'more than one sort mode specified')) |
|
687 | raise error.Abort(_(b'more than one sort mode specified')) | |
669 | if sortmode: |
|
688 | if sortmode: | |
670 | sortmode = sortmode[0] |
|
689 | sortmode = sortmode[0] | |
671 | else: |
|
690 | else: | |
672 | sortmode = defaultsort |
|
691 | sortmode = defaultsort | |
673 |
|
692 | |||
674 | if sortmode == b'sourcesort' and not srcc.hasnativeorder(): |
|
693 | if sortmode == b'sourcesort' and not srcc.hasnativeorder(): | |
675 | raise error.Abort( |
|
694 | raise error.Abort( | |
676 | _(b'--sourcesort is not supported by this data source') |
|
695 | _(b'--sourcesort is not supported by this data source') | |
677 | ) |
|
696 | ) | |
678 | if sortmode == b'closesort' and not srcc.hasnativeclose(): |
|
697 | if sortmode == b'closesort' and not srcc.hasnativeclose(): | |
679 | raise error.Abort( |
|
698 | raise error.Abort( | |
680 | _(b'--closesort is not supported by this data source') |
|
699 | _(b'--closesort is not supported by this data source') | |
681 | ) |
|
700 | ) | |
682 |
|
701 | |||
683 | fmap = opts.get(b'filemap') |
|
702 | fmap = opts.get(b'filemap') | |
684 | if fmap: |
|
703 | if fmap: | |
685 | srcc = filemap.filemap_source(ui, srcc, fmap) |
|
704 | srcc = filemap.filemap_source(ui, srcc, fmap) | |
686 | destc.setfilemapmode(True) |
|
705 | destc.setfilemapmode(True) | |
687 |
|
706 | |||
688 | if not revmapfile: |
|
707 | if not revmapfile: | |
689 | revmapfile = destc.revmapfile() |
|
708 | revmapfile = destc.revmapfile() | |
690 |
|
709 | |||
691 | c = converter(ui, srcc, destc, revmapfile, opts) |
|
710 | c = converter(ui, srcc, destc, revmapfile, opts) | |
692 | c.convert(sortmode) |
|
711 | c.convert(sortmode) |
@@ -1,497 +1,530 | |||||
1 | # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com> |
|
1 | # Copyright 2007 Bryan O'Sullivan <bos@serpentine.com> | |
2 | # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> |
|
2 | # Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> | |
3 | # |
|
3 | # | |
4 | # This software may be used and distributed according to the terms of the |
|
4 | # This software may be used and distributed according to the terms of the | |
5 | # GNU General Public License version 2 or any later version. |
|
5 | # GNU General Public License version 2 or any later version. | |
6 |
|
6 | |||
7 |
|
7 | |||
8 | import posixpath |
|
8 | import posixpath | |
|
9 | import typing | |||
|
10 | ||||
|
11 | from typing import ( | |||
|
12 | Iterator, | |||
|
13 | Mapping, | |||
|
14 | MutableMapping, | |||
|
15 | Optional, | |||
|
16 | Set, | |||
|
17 | Tuple, | |||
|
18 | overload, | |||
|
19 | ) | |||
9 |
|
20 | |||
10 | from mercurial.i18n import _ |
|
21 | from mercurial.i18n import _ | |
11 | from mercurial import ( |
|
22 | from mercurial import ( | |
12 | error, |
|
23 | error, | |
13 | pycompat, |
|
24 | pycompat, | |
14 | ) |
|
25 | ) | |
15 | from . import common |
|
26 | from . import common | |
16 |
|
27 | |||
|
28 | if typing.TYPE_CHECKING: | |||
|
29 | from mercurial import ( | |||
|
30 | ui as uimod, | |||
|
31 | ) | |||
|
32 | ||||
17 | SKIPREV = common.SKIPREV |
|
33 | SKIPREV = common.SKIPREV | |
18 |
|
34 | |||
19 |
|
35 | |||
20 | def rpairs(path): |
|
36 | def rpairs(path: bytes) -> Iterator[Tuple[bytes, bytes]]: | |
21 | """Yield tuples with path split at '/', starting with the full path. |
|
37 | """Yield tuples with path split at '/', starting with the full path. | |
22 | No leading, trailing or double '/', please. |
|
38 | No leading, trailing or double '/', please. | |
23 | >>> for x in rpairs(b'foo/bar/baz'): print(x) |
|
39 | >>> for x in rpairs(b'foo/bar/baz'): print(x) | |
24 | ('foo/bar/baz', '') |
|
40 | ('foo/bar/baz', '') | |
25 | ('foo/bar', 'baz') |
|
41 | ('foo/bar', 'baz') | |
26 | ('foo', 'bar/baz') |
|
42 | ('foo', 'bar/baz') | |
27 | ('.', 'foo/bar/baz') |
|
43 | ('.', 'foo/bar/baz') | |
28 | """ |
|
44 | """ | |
29 | i = len(path) |
|
45 | i = len(path) | |
30 | while i != -1: |
|
46 | while i != -1: | |
31 | yield path[:i], path[i + 1 :] |
|
47 | yield path[:i], path[i + 1 :] | |
32 | i = path.rfind(b'/', 0, i) |
|
48 | i = path.rfind(b'/', 0, i) | |
33 | yield b'.', path |
|
49 | yield b'.', path | |
34 |
|
50 | |||
35 |
|
51 | |||
|
52 | if typing.TYPE_CHECKING: | |||
|
53 | ||||
|
54 | @overload | |||
|
55 | def normalize(path: bytes) -> bytes: | |||
|
56 | pass | |||
|
57 | ||||
|
58 | @overload | |||
|
59 | def normalize(path: None) -> None: | |||
|
60 | pass | |||
|
61 | ||||
|
62 | ||||
36 | def normalize(path): |
|
63 | def normalize(path): | |
37 | """We use posixpath.normpath to support cross-platform path format. |
|
64 | """We use posixpath.normpath to support cross-platform path format. | |
38 | However, it doesn't handle None input. So we wrap it up.""" |
|
65 | However, it doesn't handle None input. So we wrap it up.""" | |
39 | if path is None: |
|
66 | if path is None: | |
40 | return None |
|
67 | return None | |
41 | return posixpath.normpath(path) |
|
68 | return posixpath.normpath(path) | |
42 |
|
69 | |||
43 |
|
70 | |||
44 | class filemapper: |
|
71 | class filemapper: | |
45 | """Map and filter filenames when importing. |
|
72 | """Map and filter filenames when importing. | |
46 | A name can be mapped to itself, a new name, or None (omit from new |
|
73 | A name can be mapped to itself, a new name, or None (omit from new | |
47 | repository).""" |
|
74 | repository).""" | |
48 |
|
75 | |||
49 | def __init__(self, ui, path=None): |
|
76 | rename: MutableMapping[bytes, bytes] | |
|
77 | targetprefixes: Optional[Set[bytes]] | |||
|
78 | ||||
|
79 | def __init__(self, ui: "uimod.ui", path=None) -> None: | |||
50 | self.ui = ui |
|
80 | self.ui = ui | |
51 | self.include = {} |
|
81 | self.include = {} | |
52 | self.exclude = {} |
|
82 | self.exclude = {} | |
53 | self.rename = {} |
|
83 | self.rename = {} | |
54 | self.targetprefixes = None |
|
84 | self.targetprefixes = None | |
55 | if path: |
|
85 | if path: | |
56 | if self.parse(path): |
|
86 | if self.parse(path): | |
57 | raise error.Abort(_(b'errors in filemap')) |
|
87 | raise error.Abort(_(b'errors in filemap')) | |
58 |
|
88 | |||
59 | def parse(self, path): |
|
89 | # TODO: cmd==b'source' case breaks if ``path``is str | |
|
90 | def parse(self, path) -> int: | |||
60 | errs = 0 |
|
91 | errs = 0 | |
61 |
|
92 | |||
62 | def check(name, mapping, listname): |
|
93 | def check(name: bytes, mapping, listname: bytes): | |
63 | if not name: |
|
94 | if not name: | |
64 | self.ui.warn( |
|
95 | self.ui.warn( | |
65 | _(b'%s:%d: path to %s is missing\n') |
|
96 | _(b'%s:%d: path to %s is missing\n') | |
66 | % (lex.infile, lex.lineno, listname) |
|
97 | % (lex.infile, lex.lineno, listname) | |
67 | ) |
|
98 | ) | |
68 | return 1 |
|
99 | return 1 | |
69 | if name in mapping: |
|
100 | if name in mapping: | |
70 | self.ui.warn( |
|
101 | self.ui.warn( | |
71 | _(b'%s:%d: %r already in %s list\n') |
|
102 | _(b'%s:%d: %r already in %s list\n') | |
72 | % (lex.infile, lex.lineno, name, listname) |
|
103 | % (lex.infile, lex.lineno, name, listname) | |
73 | ) |
|
104 | ) | |
74 | return 1 |
|
105 | return 1 | |
75 | if name.startswith(b'/') or name.endswith(b'/') or b'//' in name: |
|
106 | if name.startswith(b'/') or name.endswith(b'/') or b'//' in name: | |
76 | self.ui.warn( |
|
107 | self.ui.warn( | |
77 | _(b'%s:%d: superfluous / in %s %r\n') |
|
108 | _(b'%s:%d: superfluous / in %s %r\n') | |
78 | % (lex.infile, lex.lineno, listname, pycompat.bytestr(name)) |
|
109 | % (lex.infile, lex.lineno, listname, pycompat.bytestr(name)) | |
79 | ) |
|
110 | ) | |
80 | return 1 |
|
111 | return 1 | |
81 | return 0 |
|
112 | return 0 | |
82 |
|
113 | |||
83 | lex = common.shlexer( |
|
114 | lex = common.shlexer( | |
84 | filepath=path, wordchars=b'!@#$%^&*()-=+[]{}|;:,./<>?' |
|
115 | filepath=path, wordchars=b'!@#$%^&*()-=+[]{}|;:,./<>?' | |
85 | ) |
|
116 | ) | |
86 | cmd = lex.get_token() |
|
117 | cmd = lex.get_token() | |
87 | while cmd: |
|
118 | while cmd: | |
88 | if cmd == b'include': |
|
119 | if cmd == b'include': | |
89 | name = normalize(lex.get_token()) |
|
120 | name = normalize(lex.get_token()) | |
90 | errs += check(name, self.exclude, b'exclude') |
|
121 | errs += check(name, self.exclude, b'exclude') | |
91 | self.include[name] = name |
|
122 | self.include[name] = name | |
92 | elif cmd == b'exclude': |
|
123 | elif cmd == b'exclude': | |
93 | name = normalize(lex.get_token()) |
|
124 | name = normalize(lex.get_token()) | |
94 | errs += check(name, self.include, b'include') |
|
125 | errs += check(name, self.include, b'include') | |
95 | errs += check(name, self.rename, b'rename') |
|
126 | errs += check(name, self.rename, b'rename') | |
96 | self.exclude[name] = name |
|
127 | self.exclude[name] = name | |
97 | elif cmd == b'rename': |
|
128 | elif cmd == b'rename': | |
98 | src = normalize(lex.get_token()) |
|
129 | src = normalize(lex.get_token()) | |
99 | dest = normalize(lex.get_token()) |
|
130 | dest = normalize(lex.get_token()) | |
100 | errs += check(src, self.exclude, b'exclude') |
|
131 | errs += check(src, self.exclude, b'exclude') | |
101 | self.rename[src] = dest |
|
132 | self.rename[src] = dest | |
102 | elif cmd == b'source': |
|
133 | elif cmd == b'source': | |
103 | errs += self.parse(normalize(lex.get_token())) |
|
134 | errs += self.parse(normalize(lex.get_token())) | |
104 | else: |
|
135 | else: | |
105 | self.ui.warn( |
|
136 | self.ui.warn( | |
106 | _(b'%s:%d: unknown directive %r\n') |
|
137 | _(b'%s:%d: unknown directive %r\n') | |
107 | % (lex.infile, lex.lineno, pycompat.bytestr(cmd)) |
|
138 | % (lex.infile, lex.lineno, pycompat.bytestr(cmd)) | |
108 | ) |
|
139 | ) | |
109 | errs += 1 |
|
140 | errs += 1 | |
110 | cmd = lex.get_token() |
|
141 | cmd = lex.get_token() | |
111 | return errs |
|
142 | return errs | |
112 |
|
143 | |||
113 | def lookup(self, name, mapping): |
|
144 | def lookup( | |
|
145 | self, name: bytes, mapping: Mapping[bytes, bytes] | |||
|
146 | ) -> Tuple[bytes, bytes, bytes]: | |||
114 | name = normalize(name) |
|
147 | name = normalize(name) | |
115 | for pre, suf in rpairs(name): |
|
148 | for pre, suf in rpairs(name): | |
116 | try: |
|
149 | try: | |
117 | return mapping[pre], pre, suf |
|
150 | return mapping[pre], pre, suf | |
118 | except KeyError: |
|
151 | except KeyError: | |
119 | pass |
|
152 | pass | |
120 | return b'', name, b'' |
|
153 | return b'', name, b'' | |
121 |
|
154 | |||
122 | def istargetfile(self, filename): |
|
155 | def istargetfile(self, filename: bytes) -> bool: | |
123 | """Return true if the given target filename is covered as a destination |
|
156 | """Return true if the given target filename is covered as a destination | |
124 | of the filemap. This is useful for identifying what parts of the target |
|
157 | of the filemap. This is useful for identifying what parts of the target | |
125 | repo belong to the source repo and what parts don't.""" |
|
158 | repo belong to the source repo and what parts don't.""" | |
126 | if self.targetprefixes is None: |
|
159 | if self.targetprefixes is None: | |
127 | self.targetprefixes = set() |
|
160 | self.targetprefixes = set() | |
128 | for before, after in self.rename.items(): |
|
161 | for before, after in self.rename.items(): | |
129 | self.targetprefixes.add(after) |
|
162 | self.targetprefixes.add(after) | |
130 |
|
163 | |||
131 | # If "." is a target, then all target files are considered from the |
|
164 | # If "." is a target, then all target files are considered from the | |
132 | # source. |
|
165 | # source. | |
133 | if not self.targetprefixes or b'.' in self.targetprefixes: |
|
166 | if not self.targetprefixes or b'.' in self.targetprefixes: | |
134 | return True |
|
167 | return True | |
135 |
|
168 | |||
136 | filename = normalize(filename) |
|
169 | filename = normalize(filename) | |
137 | for pre, suf in rpairs(filename): |
|
170 | for pre, suf in rpairs(filename): | |
138 | # This check is imperfect since it doesn't account for the |
|
171 | # This check is imperfect since it doesn't account for the | |
139 | # include/exclude list, but it should work in filemaps that don't |
|
172 | # include/exclude list, but it should work in filemaps that don't | |
140 | # apply include/exclude to the same source directories they are |
|
173 | # apply include/exclude to the same source directories they are | |
141 | # renaming. |
|
174 | # renaming. | |
142 | if pre in self.targetprefixes: |
|
175 | if pre in self.targetprefixes: | |
143 | return True |
|
176 | return True | |
144 | return False |
|
177 | return False | |
145 |
|
178 | |||
    def __call__(self, name: bytes) -> Optional[bytes]:
        # Map a source file name through the filemap: return the (possibly
        # renamed) destination name, or None when the file is filtered out.
        #
        # lookup() returns the longest matching prefix, so include/exclude
        # precedence is decided by comparing match lengths below.
        if self.include:
            inc = self.lookup(name, self.include)[0]
        else:
            # No include rules: everything is implicitly included.
            inc = name
        if self.exclude:
            exc = self.lookup(name, self.exclude)[0]
        else:
            exc = b''
        # Excluded when: there are only exclude rules and one matched, or an
        # exclude match is at least as specific as the include match.
        if (not self.include and exc) or (len(inc) <= len(exc)):
            return None
        newpre, pre, suf = self.lookup(name, self.rename)
        if newpre:
            if newpre == b'.':
                # Renaming a prefix to "." hoists the suffix to the root.
                return suf
            if suf:
                # Avoid a doubled slash when the destination already ends
                # with one.
                if newpre.endswith(b'/'):
                    return newpre + suf
                return newpre + b'/' + suf
            return newpre
        return name
167 |
|
200 | |||
168 | def active(self): |
|
201 | def active(self) -> bool: | |
169 | return bool(self.include or self.exclude or self.rename) |
|
202 | return bool(self.include or self.exclude or self.rename) | |
170 |
|
203 | |||
171 |
|
204 | |||
172 | # This class does two additional things compared to a regular source: |
|
205 | # This class does two additional things compared to a regular source: | |
173 | # |
|
206 | # | |
174 | # - Filter and rename files. This is mostly wrapped by the filemapper |
|
207 | # - Filter and rename files. This is mostly wrapped by the filemapper | |
175 | # class above. We hide the original filename in the revision that is |
|
208 | # class above. We hide the original filename in the revision that is | |
176 | # returned by getchanges to be able to find things later in getfile. |
|
209 | # returned by getchanges to be able to find things later in getfile. | |
177 | # |
|
210 | # | |
178 | # - Return only revisions that matter for the files we're interested in. |
|
211 | # - Return only revisions that matter for the files we're interested in. | |
179 | # This involves rewriting the parents of the original revision to |
|
212 | # This involves rewriting the parents of the original revision to | |
180 | # create a graph that is restricted to those revisions. |
|
213 | # create a graph that is restricted to those revisions. | |
181 | # |
|
214 | # | |
182 | # This set of revisions includes not only revisions that directly |
|
215 | # This set of revisions includes not only revisions that directly | |
183 | # touch files we're interested in, but also merges that merge two |
|
216 | # touch files we're interested in, but also merges that merge two | |
184 | # or more interesting revisions. |
|
217 | # or more interesting revisions. | |
185 |
|
218 | |||
186 |
|
219 | |||
187 | class filemap_source(common.converter_source): |
|
220 | class filemap_source(common.converter_source): | |
    def __init__(self, ui: "uimod.ui", baseconverter, filemap) -> None:
        # Wrap ``baseconverter`` so that revisions and files are filtered
        # and renamed according to ``filemap``.
        super(filemap_source, self).__init__(ui, baseconverter.repotype)
        self.base = baseconverter
        self.filemapper = filemapper(ui, filemap)
        # Cache of commit objects keyed by revision, kept so their parents
        # can be rewritten later by getchanges().
        self.commits = {}
        # if a revision rev has parent p in the original revision graph, then
        # rev will have parent self.parentmap[p] in the restricted graph.
        self.parentmap = {}
        # self.wantedancestors[rev] is the set of all ancestors of rev that
        # are in the restricted graph.
        self.wantedancestors = {}
        # List of (rev, wanted, arg) tuples in conversion order; rebuilt by
        # setrevmap() on restart and extended by getchanges().
        self.convertedorder = None
        self._rebuilt = False
        # Original (unrewritten) parents of each converted revision.
        self.origparents = {}
        # Reference counts of children per revision, used by _discard() to
        # free bookkeeping once all children have been processed.
        self.children = {}
        self.seenchildren = {}
        # experimental config: convert.ignoreancestorcheck
        self.ignoreancestorcheck = self.ui.configbool(
            b'convert', b'ignoreancestorcheck'
        )
208 |
|
241 | |||
    def before(self) -> None:
        # Delegate conversion setup to the wrapped converter.
        self.base.before()
211 |
|
244 | |||
    def after(self) -> None:
        # Delegate conversion teardown to the wrapped converter.
        self.base.after()
214 |
|
247 | |||
    def setrevmap(self, revmap):
        # rebuild our state to make things restartable
        #
        # To avoid calling getcommit for every revision that has already
        # been converted, we rebuild only the parentmap, delaying the
        # rebuild of wantedancestors until we need it (i.e. until a
        # merge).
        #
        # We assume the order argument lists the revisions in
        # topological order, so that we can infer which revisions were
        # wanted by previous runs.
        self._rebuilt = not revmap
        seen = {SKIPREV: SKIPREV}
        dummyset = set()
        converted = []
        for rev in revmap.order:
            mapped = revmap[rev]
            # A revision was "wanted" iff it is the first source revision
            # that maps to this sink revision.
            wanted = mapped not in seen
            if wanted:
                seen[mapped] = rev
                self.parentmap[rev] = rev
            else:
                self.parentmap[rev] = seen[mapped]
                # Placeholder: real ancestor sets are computed lazily by
                # rebuild() when a merge requires them.
                self.wantedancestors[rev] = dummyset
            arg = seen[mapped]
            if arg == SKIPREV:
                arg = None
            converted.append((rev, wanted, arg))
        self.convertedorder = converted
        return self.base.setrevmap(revmap)
245 |
|
278 | |||
    def rebuild(self) -> bool:
        # Recompute parentmap/wantedancestors from convertedorder after a
        # restart.  Idempotent: subsequent calls are no-ops.
        if self._rebuilt:
            return True
        self._rebuilt = True
        self.parentmap.clear()
        self.wantedancestors.clear()
        self.seenchildren.clear()
        # First pass: fetch original parents and count children so that
        # _discard() below can release state at the right time.
        for rev, wanted, arg in self.convertedorder:
            if rev not in self.origparents:
                try:
                    self.origparents[rev] = self.getcommit(rev).parents
                except error.RepoLookupError:
                    # The revmap may reference revisions no longer present
                    # in the source; skip them rather than aborting.
                    self.ui.debug(b"unknown revmap source: %s\n" % rev)
                    continue
            if arg is not None:
                self.children[arg] = self.children.get(arg, 0) + 1

        # Second pass: replay wanted/not-wanted marking in order.
        for rev, wanted, arg in self.convertedorder:
            try:
                parents = self.origparents[rev]
            except KeyError:
                continue  # unknown revmap source
            if wanted:
                self.mark_wanted(rev, parents)
            else:
                self.mark_not_wanted(rev, arg)
                self._discard(arg, *parents)

        return True
275 |
|
308 | |||
    def getheads(self):
        # Delegate to the wrapped converter.
        return self.base.getheads()
278 |
|
311 | |||
    def getcommit(self, rev: bytes):
        # We want to save a reference to the commit objects to be able
        # to rewrite their parents later on.
        c = self.commits[rev] = self.base.getcommit(rev)
        # Track child counts so _discard() knows when per-revision state
        # can be released.
        for p in c.parents:
            self.children[p] = self.children.get(p, 0) + 1
        return c
286 |
|
319 | |||
    def numcommits(self):
        # Delegate to the wrapped converter.
        return self.base.numcommits()
289 |
|
322 | |||
290 | def _cachedcommit(self, rev): |
|
323 | def _cachedcommit(self, rev): | |
291 | if rev in self.commits: |
|
324 | if rev in self.commits: | |
292 | return self.commits[rev] |
|
325 | return self.commits[rev] | |
293 | return self.base.getcommit(rev) |
|
326 | return self.base.getcommit(rev) | |
294 |
|
327 | |||
    def _discard(self, *revs) -> None:
        # Release per-revision bookkeeping once every child of a revision
        # has been processed, keeping parentmap/wantedancestors bounded.
        for r in revs:
            if r is None:
                continue
            self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
            if self.seenchildren[r] == self.children[r]:
                # All children seen: this revision's data can go.
                self.wantedancestors.pop(r, None)
                self.parentmap.pop(r, None)
                del self.seenchildren[r]
                if self._rebuilt:
                    # children[] is only fully populated after rebuild();
                    # before that, keep the counts around.
                    del self.children[r]
306 |
|
339 | |||
    def wanted(self, rev, i) -> bool:
        # Return True if we're directly interested in rev.
        #
        # i is an index selecting one of the parents of rev (if rev
        # has no parents, i is None).  getchangedfiles will give us
        # the list of files that are different in rev and in the parent
        # indicated by i.  If we're interested in any of these files,
        # we're interested in rev.
        try:
            files = self.base.getchangedfiles(rev, i)
        except NotImplementedError:
            raise error.Abort(_(b"source repository doesn't support --filemap"))
        for f in files:
            if self.filemapper(f):
                return True

        # The include directive is documented to include nothing else (though
        # valid branch closes are included).
        if self.filemapper.include:
            return False

        # Allow empty commits in the source revision through.  The getchanges()
        # method doesn't even bother calling this if it determines that the
        # close marker is significant (i.e. all of the branch ancestors weren't
        # eliminated).  Therefore if there *is* a close marker, getchanges()
        # doesn't consider it significant, and this revision should be dropped.
        return not files and b'close' not in self.commits[rev].extra
334 |
|
367 | |||
    def mark_not_wanted(self, rev, p) -> None:
        # Mark rev as not interesting and update data structures.

        if p is None:
            # A root revision. Use SKIPREV to indicate that it doesn't
            # map to any revision in the restricted graph.  Put SKIPREV
            # in the set of wanted ancestors to simplify code elsewhere
            self.parentmap[rev] = SKIPREV
            self.wantedancestors[rev] = {SKIPREV}
            return

        # Reuse the data from our parent.
        self.parentmap[rev] = self.parentmap[p]
        self.wantedancestors[rev] = self.wantedancestors[p]
349 |
|
382 | |||
    def mark_wanted(self, rev, parents) -> None:
        # Mark rev as wanted and update data structures.

        # rev will be in the restricted graph, so children of rev in
        # the original graph should still have rev as a parent in the
        # restricted graph.
        self.parentmap[rev] = rev

        # The set of wanted ancestors of rev is the union of the sets
        # of wanted ancestors of its parents. Plus rev itself.
        wrev = set()
        for p in parents:
            if p in self.wantedancestors:
                wrev.update(self.wantedancestors[p])
            else:
                # Can happen with an incomplete revmap; warn but proceed.
                self.ui.warn(
                    _(b'warning: %s parent %s is missing\n') % (rev, p)
                )
        wrev.add(rev)
        self.wantedancestors[rev] = wrev
370 |
|
403 | |||
    def getchanges(self, rev, full):
        # Decide whether ``rev`` survives filtering; if it does, return its
        # filtered (files, copies, cleanp2) with original filenames hidden in
        # the "rev" slot of each entry; if not, return the mapped parent
        # revision id it collapses onto.
        parents = self.commits[rev].parents
        if len(parents) > 1 and not self.ignoreancestorcheck:
            self.rebuild()

        # To decide whether we're interested in rev we:
        #
        # - calculate what parents rev will have if it turns out we're
        #   interested in it. If it's going to have more than 1 parent,
        #   we're interested in it.
        #
        # - otherwise, we'll compare it with the single parent we found.
        #   If any of the files we're interested in is different in
        #   the two revisions, we're interested in rev.

        # A parent p is interesting if its mapped version (self.parentmap[p]):
        # - is not SKIPREV
        # - is still not in the list of parents (we don't want duplicates)
        # - is not an ancestor of the mapped versions of the other parents or
        #   there is no parent in the same branch as the current revision.
        mparents = []
        knownparents = set()
        branch = self.commits[rev].branch
        hasbranchparent = False
        for i, p1 in enumerate(parents):
            mp1 = self.parentmap[p1]
            if mp1 == SKIPREV or mp1 in knownparents:
                continue

            isancestor = not self.ignoreancestorcheck and any(
                p2
                for p2 in parents
                if p1 != p2
                and mp1 != self.parentmap[p2]
                and mp1 in self.wantedancestors[p2]
            )
            if not isancestor and not hasbranchparent and len(parents) > 1:
                # This could be expensive, avoid unnecessary calls.
                if self._cachedcommit(p1).branch == branch:
                    hasbranchparent = True
            mparents.append((p1, mp1, i, isancestor))
            knownparents.add(mp1)
        # Discard parents ancestors of other parents if there is a
        # non-ancestor one on the same branch as the current revision.
        if hasbranchparent:
            mparents = [p for p in mparents if not p[3]]
        # wp is the index (in ``parents``) of the parent used for the
        # wanted() file comparison below.
        wp = None
        if mparents:
            wp = max(p[2] for p in mparents)
            mparents = [p[1] for p in mparents]
        elif parents:
            wp = 0

        self.origparents[rev] = parents

        closed = False
        if b'close' in self.commits[rev].extra:
            # A branch closing revision is only useful if one of its
            # parents belong to the branch being closed
            pbranches = [self._cachedcommit(p).branch for p in mparents]
            if branch in pbranches:
                closed = True

        if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
            # We don't want this revision.
            # Update our state and tell the convert process to map this
            # revision to the same revision its parent as mapped to.
            p = None
            if parents:
                p = parents[wp]
            self.mark_not_wanted(rev, p)
            self.convertedorder.append((rev, False, p))
            self._discard(*parents)
            return self.parentmap[rev]

        # We want this revision.
        # Rewrite the parents of the commit object
        self.commits[rev].parents = mparents
        self.mark_wanted(rev, parents)
        self.convertedorder.append((rev, True, None))
        self._discard(*parents)

        # Get the real changes and do the filtering/mapping. To be
        # able to get the files later on in getfile, we hide the
        # original filename in the rev part of the return value.
        changes, copies, cleanp2 = self.base.getchanges(rev, full)
        files = {}
        ncleanp2 = set(cleanp2)
        for f, r in changes:
            newf = self.filemapper(f)
            if newf and (newf != f or newf not in files):
                files[newf] = (f, r)
                if newf != f:
                    # A renamed file is no longer clean against p2.
                    ncleanp2.discard(f)
        files = sorted(files.items())

        # Remap both sides of each copy record; drop copies whose source or
        # destination is filtered out.
        ncopies = {}
        for c in copies:
            newc = self.filemapper(c)
            if newc:
                newsource = self.filemapper(copies[c])
                if newsource:
                    ncopies[newc] = newsource

        return files, ncopies, ncleanp2
476 |
|
509 | |||
    def targetfilebelongstosource(self, targetfilename: bytes) -> bool:
        # A target file belongs to the source iff the filemap can produce it.
        return self.filemapper.istargetfile(targetfilename)
479 |
|
512 | |||
    def getfile(self, name, rev):
        # ``rev`` is the (original filename, original revision) pair that
        # getchanges() hid in the rev slot; unwrap and fetch from the base.
        realname, realrev = rev
        return self.base.getfile(realname, realrev)
483 |
|
516 | |||
    def gettags(self):
        # Delegate to the wrapped converter.
        return self.base.gettags()
486 |
|
519 | |||
    def hasnativeorder(self) -> bool:
        # Delegate to the wrapped converter.
        return self.base.hasnativeorder()
489 |
|
522 | |||
    def lookuprev(self, rev):
        # Delegate to the wrapped converter.
        return self.base.lookuprev(rev)
492 |
|
525 | |||
    def getbookmarks(self):
        # Delegate to the wrapped converter.
        return self.base.getbookmarks()
495 |
|
528 | |||
    def converted(self, rev, sinkrev):
        # Notify the wrapped converter of the source->sink mapping.
        self.base.converted(rev, sinkrev)
General Comments 0
You need to be logged in to leave comments.
Login now