@@ -196,15 +196,16 @@ class branchcache:
 
     def __init__(
         self,
-        repo,
-        entries=(),
-        tipnode=None,
-        tiprev=nullrev,
-        filteredhash=None,
-        closednodes=None,
-        hasnode=None,
-    ):
-        # type: (localrepo.localrepository, Union[Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]], bytes, int, Optional[bytes], Optional[Set[bytes]], Optional[Callable[[bytes], bool]]) -> None
+        repo: "localrepo.localrepository",
+        entries: Union[
+            Dict[bytes, List[bytes]], Iterable[Tuple[bytes, List[bytes]]]
+        ] = (),
+        tipnode: Optional[bytes] = None,
+        tiprev: Optional[int] = nullrev,
+        filteredhash: Optional[bytes] = None,
+        closednodes: Optional[Set[bytes]] = None,
+        hasnode: Optional[Callable[[bytes], bool]] = None,
+    ) -> None:
         """hasnode is a function which can be used to verify whether changelog
         has a given node or not. If it's not provided, we assume that every node
         we have exists in changelog"""
@@ -4118,8 +4118,10 @@ def abortgraft(ui, repo, graftstate):
     return 0
 
 
-def readgraftstate(repo, graftstate):
-    # type: (Any, statemod.cmdstate) -> Dict[bytes, Any]
+def readgraftstate(
+    repo: Any,
+    graftstate: statemod.cmdstate,
+) -> Dict[bytes, Any]:
     """read the graft state file and return a dict of the data stored in it"""
     try:
         return graftstate.read()
@@ -59,8 +59,7 @@ unichr = chr
 assert all(i.startswith((b"\xe2", b"\xef")) for i in _ignore)
 
 
-def hfsignoreclean(s):
-    # type: (bytes) -> bytes
+def hfsignoreclean(s: bytes) -> bytes:
     """Remove codepoints ignored by HFS+ from s.
 
     >>> hfsignoreclean(u'.h\u200cg'.encode('utf-8'))
@@ -133,8 +132,7 @@ class localstr(bytes):
 
     if typing.TYPE_CHECKING:
         # pseudo implementation to help pytype see localstr() constructor
-        def __init__(self, u, l):
-            # type: (bytes, bytes) -> None
+        def __init__(self, u: bytes, l: bytes) -> None:
             super(localstr, self).__init__(l)
             self._utf8 = u
 
@@ -153,8 +151,7 @@ class safelocalstr(bytes):
     """
 
 
-def tolocal(s):
-    # type: (bytes) -> bytes
+def tolocal(s: bytes) -> bytes:
     """
     Convert a string from internal UTF-8 to local encoding
 
@@ -222,8 +219,7 @@ def tolocal(s):
         )
 
 
-def fromlocal(s):
-    # type: (bytes) -> bytes
+def fromlocal(s: bytes) -> bytes:
     """
     Convert a string from the local character encoding to UTF-8
 
@@ -254,20 +250,17 @@ def fromlocal(s):
         )
 
 
-def unitolocal(u):
-    # type: (Text) -> bytes
+def unitolocal(u: str) -> bytes:
     """Convert a unicode string to a byte string of local encoding"""
     return tolocal(u.encode('utf-8'))
 
 
-def unifromlocal(s):
-    # type: (bytes) -> Text
+def unifromlocal(s: bytes) -> str:
     """Convert a byte string of local encoding to a unicode string"""
     return fromlocal(s).decode('utf-8')
 
 
-def unimethod(bytesfunc):
-    # type: (Callable[[Any], bytes]) -> Callable[[Any], Text]
+def unimethod(bytesfunc: Callable[[Any], bytes]) -> Callable[[Any], str]:
     """Create a proxy method that forwards __unicode__() and __str__() of
     Python 3 to __bytes__()"""
 
@@ -285,8 +278,7 @@ strfromlocal = unifromlocal
 strmethod = unimethod
 
 
-def lower(s):
-    # type: (bytes) -> bytes
+def lower(s: bytes) -> bytes:
     """best-effort encoding-aware case-folding of local string s"""
     try:
         return asciilower(s)
@@ -310,8 +302,7 @@ def lower(s):
         )
 
 
-def upper(s):
-    # type: (bytes) -> bytes
+def upper(s: bytes) -> bytes:
     """best-effort encoding-aware case-folding of local string s"""
     try:
         return asciiupper(s)
@@ -319,8 +310,7 @@ def upper(s):
         return upperfallback(s)
 
 
-def upperfallback(s):
-    # type: (Any) -> Any
+def upperfallback(s: Any) -> Any:
     try:
         if isinstance(s, localstr):
             u = s._utf8.decode("utf-8")
@@ -395,14 +385,12 @@ else:
         )
 
 
-def colwidth(s):
-    # type: (bytes) -> int
+def colwidth(s: bytes) -> int:
     """Find the column width of a string for display in the local encoding"""
     return ucolwidth(s.decode(_sysstr(encoding), 'replace'))
 
 
-def ucolwidth(d):
-    # type: (Text) -> int
+def ucolwidth(d: Text) -> int:
     """Find the column width of a Unicode string for display"""
     eaw = getattr(unicodedata, 'east_asian_width', None)
     if eaw is not None:
@@ -410,8 +398,7 @@ def ucolwidth(d):
     return len(d)
 
 
-def getcols(s, start, c):
-    # type: (bytes, int, int) -> bytes
+def getcols(s: bytes, start: int, c: int) -> bytes:
     """Use colwidth to find a c-column substring of s starting at byte
     index start"""
     for x in range(start + c, len(s)):
@@ -421,8 +408,12 @@ def getcols(s, start, c):
     raise ValueError('substring not found')
 
 
-def trim(s, width, ellipsis=b'', leftside=False):
-    # type: (bytes, int, bytes, bool) -> bytes
+def trim(
+    s: bytes,
+    width: int,
+    ellipsis: bytes = b'',
+    leftside: bool = False,
+) -> bytes:
     """Trim string 's' to at most 'width' columns (including 'ellipsis').
 
     If 'leftside' is True, left side of string 's' is trimmed.
@@ -540,8 +531,7 @@ class normcasespecs:
     other = 0
 
 
-def jsonescape(s, paranoid=False):
-    # type: (Any, Any) -> Any
+def jsonescape(s: Any, paranoid: Any = False) -> Any:
     """returns a string suitable for JSON
 
     JSON is problematic for us because it doesn't support non-Unicode
@@ -601,8 +591,7 @@ def jsonescape(s, paranoid=False):
 _utf8len = [0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 2, 2, 3, 4]
 
 
-def getutf8char(s, pos):
-    # type: (bytes, int) -> bytes
+def getutf8char(s: bytes, pos: int) -> bytes:
     """get the next full utf-8 character in the given string, starting at pos
 
     Raises a UnicodeError if the given location does not start a valid
@@ -620,8 +609,7 @@ def getutf8char(s, pos):
     return c
 
 
-def toutf8b(s):
-    # type: (bytes) -> bytes
+def toutf8b(s: bytes) -> bytes:
     """convert a local, possibly-binary string into UTF-8b
 
     This is intended as a generic method to preserve data when working
@@ -689,8 +677,7 @@ def toutf8b(s):
     return bytes(r)
 
 
-def fromutf8b(s):
-    # type: (bytes) -> bytes
+def fromutf8b(s: bytes) -> bytes:
     """Given a UTF-8b string, return a local, possibly-binary string.
 
     return the original binary string. This
@@ -40,8 +40,7 @@ assert [
 ]
 
 
-def _tobytes(exc):
-    # type: (...) -> bytes
+def _tobytes(exc) -> bytes:
     """Byte-stringify exception in the same way as BaseException_str()"""
     if not exc.args:
         return b''
@@ -68,8 +67,7 @@ class Error(Hint, Exception):
     coarse_exit_code = None
     detailed_exit_code = None
 
-    def __init__(self, message, hint=None):
-        # type: (bytes, Optional[bytes]) -> None
+    def __init__(self, message: bytes, hint: Optional[bytes] = None) -> None:
         self.message = message
         self.hint = hint
         # Pass the message into the Exception constructor to help extensions
@@ -79,15 +77,13 @@ class Error(Hint, Exception):
     def __bytes__(self):
         return self.message
 
-    def __str__(self):
-        # type: () -> str
+    def __str__(self) -> str:
         # the output would be unreadable if the message was translated,
         # but do not replace it with encoding.strfromlocal(), which
         # may raise another exception.
         return pycompat.sysstr(self.__bytes__())
 
-    def format(self):
-        # type: () -> bytes
+    def format(self) -> bytes:
         from .i18n import _
 
         message = _(b"abort: %s\n") % self.message
@@ -114,8 +110,7 @@ class RevlogError(StorageError):
 
 
 class SidedataHashError(RevlogError):
-    def __init__(self, key, expected, got):
-        # type: (int, bytes, bytes) -> None
+    def __init__(self, key: int, expected: bytes, got: bytes) -> None:
         self.hint = None
         self.sidedatakey = key
         self.expecteddigest = expected
@@ -127,8 +122,7 @@ class FilteredIndexError(IndexError):
 
 
 class LookupError(RevlogError, KeyError):
-    def __init__(self, name, index, message):
-        # type: (bytes, bytes, bytes) -> None
+    def __init__(self, name: bytes, index: bytes, message: bytes) -> None:
         self.name = name
         self.index = index
         # this can't be called 'message' because at least some installs of
@@ -165,8 +159,7 @@ class ManifestLookupError(LookupError):
 class CommandError(Exception):
     """Exception raised on errors in parsing the command line."""
 
-    def __init__(self, command, message):
-        # type: (Optional[bytes], bytes) -> None
+    def __init__(self, command: Optional[bytes], message: bytes) -> None:
         self.command = command
         self.message = message
         super(CommandError, self).__init__()
@@ -177,8 +170,11 @@ class CommandError(Exception):
 class UnknownCommand(Exception):
     """Exception raised if command is not in the command table."""
 
-    def __init__(self, command, all_commands=None):
-        # type: (bytes, Optional[List[bytes]]) -> None
+    def __init__(
+        self,
+        command: bytes,
+        all_commands: Optional[List[bytes]] = None,
+    ) -> None:
         self.command = command
         self.all_commands = all_commands
         super(UnknownCommand, self).__init__()
@@ -189,8 +185,7 @@ class UnknownCommand(Exception):
 class AmbiguousCommand(Exception):
     """Exception raised if command shortcut matches more than one command."""
 
-    def __init__(self, prefix, matches):
-        # type: (bytes, List[bytes]) -> None
+    def __init__(self, prefix: bytes, matches: List[bytes]) -> None:
         self.prefix = prefix
         self.matches = matches
         super(AmbiguousCommand, self).__init__()
@@ -201,8 +196,7 @@ class AmbiguousCommand(Exception):
 class WorkerError(Exception):
     """Exception raised when a worker process dies."""
 
-    def __init__(self, status_code):
-        # type: (int) -> None
+    def __init__(self, status_code: int) -> None:
         self.status_code = status_code
         # Pass status code to superclass just so it becomes part of __bytes__
         super(WorkerError, self).__init__(status_code)
@@ -216,8 +210,7 @@ class InterventionRequired(Abort):
     coarse_exit_code = 1
     detailed_exit_code = 240
 
-    def format(self):
-        # type: () -> bytes
+    def format(self) -> bytes:
         from .i18n import _
 
         message = _(b"%s\n") % self.message
@@ -229,8 +222,7 @@ class InterventionRequired(Abort):
 class ConflictResolutionRequired(InterventionRequired):
     """Exception raised when a continuable command required merge conflict resolution."""
 
-    def __init__(self, opname):
-        # type: (bytes) -> None
+    def __init__(self, opname: bytes) -> None:
         from .i18n import _
 
         self.opname = opname
@@ -299,13 +291,16 @@ class ConfigError(Abort):
 
     detailed_exit_code = 30
 
-    def __init__(self, message, location=None, hint=None):
-        # type: (bytes, Optional[bytes], Optional[bytes]) -> None
+    def __init__(
+        self,
+        message: bytes,
+        location: Optional[bytes] = None,
+        hint: Optional[bytes] = None,
+    ) -> None:
         super(ConfigError, self).__init__(message, hint=hint)
         self.location = location
 
-    def format(self):
-        # type: () -> bytes
+    def format(self) -> bytes:
         from .i18n import _
 
         if self.location is not None:
@@ -354,8 +349,11 @@ class RemoteError(Abort):
 class OutOfBandError(RemoteError):
     """Exception raised when a remote repo reports failure"""
 
-    def __init__(self, message=None, hint=None):
-        # type: (Optional[bytes], Optional[bytes]) -> None
+    def __init__(
+        self,
+        message: Optional[bytes] = None,
+        hint: Optional[bytes] = None,
+    ):
         from .i18n import _
 
         if message:
@@ -371,13 +369,16 @@ class ParseError(Abort):
 
     detailed_exit_code = 10
 
-    def __init__(self, message, location=None, hint=None):
-        # type: (bytes, Optional[Union[bytes, int]], Optional[bytes]) -> None
+    def __init__(
+        self,
+        message: bytes,
+        location: Optional[Union[bytes, int]] = None,
+        hint: Optional[bytes] = None,
+    ):
         super(ParseError, self).__init__(message, hint=hint)
         self.location = location
 
-    def format(self):
-        # type: () -> bytes
+    def format(self) -> bytes:
         from .i18n import _
 
         if self.location is not None:
@@ -404,16 +405,14 @@ class PatchApplicationError(PatchError):
     __bytes__ = _tobytes
 
 
-def getsimilar(symbols, value):
-    # type: (Iterable[bytes], bytes) -> List[bytes]
+def getsimilar(symbols: Iterable[bytes], value: bytes) -> List[bytes]:
     sim = lambda x: difflib.SequenceMatcher(None, value, x).ratio()
     # The cutoff for similarity here is pretty arbitrary. It should
     # probably be investigated and tweaked.
     return [s for s in symbols if sim(s) > 0.6]
 
 
-def similarity_hint(similar):
-    # type: (List[bytes]) -> Optional[bytes]
+def similarity_hint(similar: List[bytes]) -> Optional[bytes]:
     from .i18n import _
 
     if len(similar) == 1:
@@ -428,8 +427,7 @@ def similarity_hint(similar):
 class UnknownIdentifier(ParseError):
     """Exception raised when a {rev,file}set references an unknown identifier"""
 
-    def __init__(self, function, symbols):
-        # type: (bytes, Iterable[bytes]) -> None
+    def __init__(self, function: bytes, symbols: Iterable[bytes]) -> None:
         from .i18n import _
 
         similar = getsimilar(symbols, function)
@@ -463,16 +461,14 @@ class RequirementError(RepoError):
 class StdioError(IOError):
     """Raised if I/O to stdout or stderr fails"""
 
-    def __init__(self, err):
-        # type: (IOError) -> None
+    def __init__(self, err: IOError) -> None:
         IOError.__init__(self, err.errno, err.strerror)
 
     # no __bytes__() because error message is derived from the standard IOError
 
 
 class UnsupportedMergeRecords(Abort):
-    def __init__(self, recordtypes):
-        # type: (Iterable[bytes]) -> None
+    def __init__(self, recordtypes: Iterable[bytes]) -> None:
         from .i18n import _
 
         self.recordtypes = sorted(recordtypes)
@@ -490,15 +486,24 @@ class UnsupportedMergeRecords(Abort):
 class UnknownVersion(Abort):
     """generic exception for aborting from an encounter with an unknown version"""
 
-    def __init__(self, msg, hint=None, version=None):
-        # type: (bytes, Optional[bytes], Optional[bytes]) -> None
+    def __init__(
+        self,
+        msg: bytes,
+        hint: Optional[bytes] = None,
+        version: Optional[bytes] = None,
+    ) -> None:
         self.version = version
         super(UnknownVersion, self).__init__(msg, hint=hint)
 
 
 class LockError(IOError):
-    def __init__(self, errno, strerror, filename, desc):
-        # _type: (int, str, bytes, bytes) -> None
+    def __init__(
+        self,
+        errno: int,
+        strerror: str,
+        filename: bytes,
+        desc: Optional[bytes],
+    ) -> None:
         IOError.__init__(self, errno, strerror, filename)
         self.desc = desc
 
@@ -506,8 +511,15 @@ class LockError(IOError):
 
 
 class LockHeld(LockError):
-    def __init__(self, errno, filename, desc, locker):
+    def __init__(
+        self,
+        errno: int,
+        filename: bytes,
+        desc: Optional[bytes],
+        locker,
+    ):
         LockError.__init__(self, errno, 'Lock held', filename, desc)
+        self.filename: bytes = filename
         self.locker = locker
 
 
@@ -544,8 +556,7 @@ class PushRaced(RuntimeError):
 class ProgrammingError(Hint, RuntimeError):
     """Raised if a mercurial (core or extension) developer made a mistake"""
 
-    def __init__(self, msg, *args, **kwargs):
-        # type: (AnyStr, Any, Any) -> None
+    def __init__(self, msg: AnyStr, *args, **kwargs):
         # On Python 3, turn the message back into a string since this is
         # an internal-only error that won't be printed except in a
         # stack traces.
@@ -622,8 +633,7 @@ class CensoredNodeError(StorageError):
     Also contains the tombstone data substituted for the uncensored data.
     """
 
-    def __init__(self, filename, node, tombstone):
-        # type: (bytes, bytes, bytes) -> None
+    def __init__(self, filename: bytes, node: bytes, tombstone: bytes):
         from .node import short
 
         StorageError.__init__(self, b'%s:%s' % (filename, short(node)))
@@ -685,7 +695,10 @@ class WireprotoCommandError(Exception):
     The error is a formatter string and an optional iterable of arguments.
     """
 
-    def __init__(self, message, args=None):
-        # type: (bytes, Optional[Sequence[bytes]]) -> None
+    def __init__(
+        self,
+        message: bytes,
+        args: Optional[Sequence[bytes]] = None,
+    ) -> None:
         self.message = message
         self.messageargs = args
@@ -516,8 +516,7 @@ def changeset(web):
 rev = webcommand(b'rev')(changeset)
 
 
-def decodepath(path):
-    # type: (bytes) -> bytes
+def decodepath(path: bytes) -> bytes:
     """Hook for mapping a path in the repository to a path in the
     working copy.
 
@@ -71,8 +71,7 @@ except AttributeError:
 _msgcache = {}  # encoding: {message: translation}
 
 
-def gettext(message):
-    # type: (bytes) -> bytes
+def gettext(message: bytes) -> bytes:
     """Translate message.
 
     The message is looked up in the catalog to get a Unicode string,
@@ -123,6 +122,10 @@ def _plain():
 
 
 if _plain():
-    _ = lambda message: message  # type: Callable[[bytes], bytes]
+
+    def _(message: bytes) -> bytes:
+        return message
+
+
 else:
     _ = gettext
@@ -789,8 +789,11 @@ class walkopts:
     limit = attr.ib(default=None)
 
 
-def parseopts(ui, pats, opts):
-    # type: (Any, Sequence[bytes], Dict[bytes, Any]) -> walkopts
+def parseopts(
+    ui: Any,
+    pats: Sequence[bytes],
+    opts: Dict[bytes, Any],
+) -> walkopts:
     """Parse log command options into walkopts
 
     The returned walkopts will be passed in to getrevs() or makewalker().
@@ -1080,8 +1083,12 @@ def _initialrevs(repo, wopts):
     return revs
 
 
-def makewalker(repo, wopts):
-    # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[Callable[[Any], matchmod.basematcher]]]
+def makewalker(
+    repo: Any,
+    wopts: walkopts,
+) -> Tuple[
+    smartset.abstractsmartset, Optional[Callable[[Any], matchmod.basematcher]]
+]:
     """Build (revs, makefilematcher) to scan revision/file history
 
     - revs is the smartset to be traversed.
@@ -1131,8 +1138,10 @@ def makewalker(repo, wopts):
     return revs, filematcher
 
 
-def getrevs(repo, wopts):
-    # type: (Any, walkopts) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]
+def getrevs(
+    repo: Any,
+    wopts: walkopts,
+) -> Tuple[smartset.abstractsmartset, Optional[changesetdiffer]]:
     """Return (revs, differ) where revs is a smartset
 
     differ is a changesetdiffer with pre-configured file matcher.
@@ -21,6 +21,7 @@ import time
 from typing import (
     Any,
     List,
+    Optional,
     Tuple,
     Union,
 )
@@ -113,8 +114,7 @@ class SMTPS(smtplib.SMTP):
         return new_socket
 
 
-def _pyhastls():
-    # type: () -> bool
+def _pyhastls() -> bool:
     """Returns true iff Python has TLS support, false otherwise."""
     try:
         import ssl
@@ -277,8 +277,7 @@ def validateconfig(ui):
             )
 
 
-def codec2iana(cs):
-    # type: (str) -> str
+def codec2iana(cs: str) -> str:
     ''' '''
     cs = email.charset.Charset(cs).input_charset.lower()
 
@@ -288,8 +287,11 @@ def codec2iana(cs):
     return cs
 
 
-def mimetextpatch(s, subtype='plain', display=False):
-    # type: (bytes, str, bool) -> email.message.Message
+def mimetextpatch(
+    s: bytes,
+    subtype: str = 'plain',
+    display: bool = False,
+) -> email.message.Message:
     """Return MIME message suitable for a patch.
     Charset will be detected by first trying to decode as us-ascii, then utf-8,
     and finally the global encodings. If all those fail, fall back to
@@ -314,8 +316,9 @@ def mimetextpatch(s, subtype='plain', di
     return mimetextqp(s, subtype, "iso-8859-1")
 
 
-def mimetextqp(body, subtype, charset):
-    # type: (bytes, str, str) -> email.message.Message
+def mimetextqp(
+    body: bytes, subtype: str, charset: str
+) -> email.message.Message:
     """Return MIME message.
     Quoted-printable transfer encoding will be used if necessary.
     """
@@ -340,8 +343,7 @@ def mimetextqp(body, subtype, charset):
     return msg
 
 
-def _charsets(ui):
-    # type: (Any) -> List[str]
+def _charsets(ui: Any) -> List[str]:
     '''Obtains charsets to send mail parts not containing patches.'''
     charsets = [
         pycompat.sysstr(cs.lower())
@@ -358,8 +360,7 @@ def _charsets(ui):
     return [cs for cs in charsets if not cs.endswith('ascii')]
 
 
-def _encode(ui, s, charsets):
-    # type: (Any, bytes, List[str]) -> Tuple[bytes, str]
+def _encode(ui: Any, s: bytes, charsets: List[str]) -> Tuple[bytes, str]:
     """Returns (converted) string, charset tuple.
     Finds out best charset by cycling through sendcharsets in descending
     order. Tries both encoding and fallbackencoding for input. Only as
@@ -409,8 +410,12 @@ def _encode(ui, s, charsets):
     return s, 'us-ascii'
 
 
-def headencode(ui, s, charsets=None, display=False):
-    # type: (Any, Union[bytes, str], List[str], bool) -> str
+def headencode(
+    ui: Any,
+    s: Union[bytes, str],
+    charsets: Optional[List[str]] = None,
+    display: bool = False,
+) -> str:
     '''Returns RFC-2047 compliant header from given string.'''
     if not display:
         # split into words?
@@ -419,8 +424,9 @@ def headencode(ui, s, charsets=None, dis
     return encoding.strfromlocal(s)
 
 
-def _addressencode(ui, name, addr, charsets=None):
-    # type: (Any, str, str, List[str]) -> str
+def _addressencode(
+    ui: Any, name: str, addr: str, charsets: Optional[List[str]] = None
+) -> str:
     addr = encoding.strtolocal(addr)
     name = headencode(ui, name, charsets)
     try:
@@ -439,8 +445,12 @@ def _addressencode(ui, name, addr, chars
     return email.utils.formataddr((name, encoding.strfromlocal(addr)))
 
 
-def addressencode(ui, address, charsets=None, display=False):
-    # type: (Any, bytes, List[str], bool) -> str
+def addressencode(
+    ui: Any,
+    address: bytes,
+    charsets: Optional[List[str]] = None,
+    display: bool = False,
+) -> str:
     '''Turns address into RFC-2047 compliant header.'''
     if display or not address:
         return encoding.strfromlocal(address or b'')
@@ -448,8 +458,12 @@ def addressencode(ui, address, charsets=
     return _addressencode(ui, name, addr, charsets)
 
 
-def addrlistencode(ui, addrs, charsets=None, display=False):
-    # type: (Any, List[bytes], List[str], bool) -> List[str]
+def addrlistencode(
+    ui: Any,
+    addrs: List[bytes],
+    charsets: Optional[List[str]] = None,
+    display: bool = False,
+) -> List[str]:
     """Turns a list of addresses into a list of RFC-2047 compliant headers.
     A single element of input list may contain multiple addresses, but output
     always has one address per item"""
@@ -468,8 +482,12 @@ def addrlistencode(ui, addrs, charsets=N
     return result
 
 
-def mimeencode(ui, s, charsets=None, display=False):
-    # type: (Any, bytes, List[str], bool) -> email.message.Message
+def mimeencode(
+    ui: Any,
+    s: bytes,
+    charsets: Optional[List[str]] = None,
+    display: bool = False,
+) -> email.message.Message:
     """creates mime text object, encodes it if needed, and sets
     charset and transfer-encoding accordingly."""
     cs = 'us-ascii'
@@ -481,8 +499,7 @@ def mimeencode(ui, s, charsets=None, dis
 Generator = email.generator.BytesGenerator
 
 
-def parse(fp):
-    # type: (Any) -> email.message.Message
+def parse(fp: Any) -> email.message.Message:
     ep = email.parser.Parser()
     # disable the "universal newlines" mode, which isn't binary safe.
     # I have no idea if ascii/surrogateescape is correct, but that's
@@ -496,14 +513,12 @@ def parse(fp):
         fp.detach()
 
 
-def parsebytes(data):
-    # type: (bytes) -> email.message.Message
+def parsebytes(data: bytes) -> email.message.Message:
     ep = email.parser.BytesParser()
     return ep.parsebytes(data)
 
 
-def headdecode(s):
-    # type: (Union[email.header.Header, bytes]) -> bytes
+def headdecode(s: Union[email.header.Header, bytes]) -> bytes:
     '''Decodes RFC-2047 header'''
     uparts = []
     for part, charset in email.header.decode_header(s):
@@ -32,8 +32,7 @@ assert [
 ]
 
 
-def _lowerclean(s):
-    # type: (bytes) -> bytes
+def _lowerclean(s: bytes) -> bytes:
     return encoding.hfsignoreclean(s.lower())
 
 
@@ -72,8 +71,7 @@ class pathauditor:
         else:
             self.normcase = lambda x: x
 
-    def __call__(self, path, mode=None):
-        # type: (bytes, Optional[Any]) -> None
+    def __call__(self, path: bytes, mode: Optional[Any] = None) -> None:
         """Check the relative path.
         path may contain a pattern (e.g. foodir/**.txt)"""
 
@@ -170,8 +168,7 @@ class pathauditor:
                 raise error.Abort(msg % (path, pycompat.bytestr(prefix)))
         return True
 
-    def check(self, path):
-        # type: (bytes) -> bool
+    def check(self, path: bytes) -> bool:
         try:
             self(path)
             return True
@@ -192,8 +189,12 @@ class pathauditor:
         self._cached = False
 
 
-def canonpath(root, cwd, myname, auditor=None):
-    # type: (bytes, bytes, bytes, Optional[pathauditor]) -> bytes
+def canonpath(
+    root: bytes,
+    cwd: bytes,
+    myname: bytes,
+    auditor: Optional[pathauditor] = None,
+) -> bytes:
     """return the canonical path of myname, given cwd and root
 
     >>> def check(root, cwd, myname):
@@ -295,8 +296,7 @@ def canonpath(root, cwd, myname, auditor
         )
 
 
-def normasprefix(path):
-    # type: (bytes) -> bytes
+def normasprefix(path: bytes) -> bytes:
     """normalize the specified path as path prefix
 
     Returned value can be used safely for "p.startswith(prefix)",
@@ -319,8 +319,7 @@ def normasprefix(path):
         return path
 
 
-def finddirs(path):
-    # type: (bytes) -> Iterator[bytes]
+def finddirs(path: bytes) -> Iterator[bytes]:
     pos = path.rfind(b'/')
     while pos != -1:
         yield path[:pos]
@@ -355,8 +354,7 @@ class dirs:
             for f in map:
                 addpath(f)
 
-    def addpath(self, path):
-        # type: (bytes) -> None
+    def addpath(self, path: bytes) -> None:
         dirs = self._dirs
         for base in finddirs(path):
             if base.endswith(b'/'):
@@ -368,8 +366,7 @@ class dirs:
                 return
             dirs[base] = 1
 
-    def delpath(self, path):
-        # type: (bytes) -> None
+    def delpath(self, path: bytes) -> None:
         dirs = self._dirs
         for base in finddirs(path):
             if dirs[base] > 1:
@@ -380,8 +377,7 @@ class dirs:
     def __iter__(self):
         return iter(self._dirs)
 
-    def __contains__(self, d):
-        # type: (bytes) -> bool
+    def __contains__(self, d: bytes) -> bool:
         return d in self._dirs
 
 
@@ -192,20 +192,20 @@ all_internal_phases = tuple(p for p in a
 no_bundle_phases = all_internal_phases
 
 
-def supportinternal(repo):
-    # type: (localrepo.localrepository) -> bool
+def supportinternal(repo: "localrepo.localrepository") -> bool:
     """True if the internal phase can be used on a repository"""
     return requirements.INTERNAL_PHASE_REQUIREMENT in repo.requirements
 
 
-def supportarchived(repo):
-    # type: (localrepo.localrepository) -> bool
+def supportarchived(repo: "localrepo.localrepository") -> bool:
     """True if the archived phase can be used on a repository"""
     return requirements.ARCHIVED_PHASE_REQUIREMENT in repo.requirements
 
 
-def _readroots(repo, phasedefaults=None):
-    # type: (localrepo.localrepository, Optional[Phasedefaults]) -> Tuple[Phaseroots, bool]
+def _readroots(
+    repo: "localrepo.localrepository",
+    phasedefaults: Optional["Phasedefaults"] = None,
+) -> Tuple[Phaseroots, bool]:
     """Read phase roots from disk
 
     phasedefaults is a list of fn(repo, roots) callable, which are
@@ -235,8 +235,7 @@ def _readroots(repo, phasedefaults=None)
     return roots, dirty
 
 
-def binaryencode(phasemapping):
-    # type: (Dict[int, List[bytes]]) -> bytes
+def binaryencode(phasemapping: Dict[int, List[bytes]]) -> bytes:
     """encode a 'phase -> nodes' mapping into a binary stream
 
     The revision lists are encoded as (phase, root) pairs.
@@ -248,8 +247,7 @@ def binaryencode(phasemapping):
     return b''.join(binarydata)
 
 
-def binarydecode(stream):
-    # type: (...) -> Dict[int, List[bytes]]
+def binarydecode(stream) -> Dict[int, List[bytes]]:
     """decode a binary stream into a 'phase -> nodes' mapping
 
     The (phase, root) pairs are turned back into a dictionary with
@@ -367,8 +365,12 @@ def _trackphasechange(data, rev, old, ne
 
 
 class phasecache:
-    def __init__(self, repo, phasedefaults, _load=True):
-        # type: (localrepo.localrepository, Optional[Phasedefaults], bool) -> None
+    def __init__(
+        self,
+        repo: "localrepo.localrepository",
+        phasedefaults: Optional["Phasedefaults"],
+        _load: bool = True,
+    ):
         if _load:
             # Cheap trick to allow shallow-copy without copy module
             self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
@@ -377,8 +379,7 @@ class phasecache:
             self.filterunknown(repo)
             self.opener = repo.svfs
 
-    def hasnonpublicphases(self, repo):
-        # type: (localrepo.localrepository) -> bool
+    def hasnonpublicphases(self, repo: "localrepo.localrepository") -> bool:
         """detect if there are revisions with non-public phase"""
         repo = repo.unfiltered()
         cl = repo.changelog
@@ -389,8 +390,9 @@ class phasecache:
             revs for phase, revs in self.phaseroots.items() if phase != public
         )
 
-    def nonpublicphaseroots(self, repo):
-        # type: (localrepo.localrepository) -> Set[bytes]
+    def nonpublicphaseroots(
+        self, repo: "localrepo.localrepository"
+    ) -> Set[bytes]:
         """returns the roots of all non-public phases
 
         The roots are not minimized, so if the secret revisions are
@@ -409,8 +411,12 @@ class phasecache:
             ]
         )
 
-    def getrevset(self, repo, phases, subset=None):
-        # type: (localrepo.localrepository, Iterable[int], Optional[Any]) -> Any
+    def getrevset(
+        self,
+        repo: "localrepo.localrepository",
+        phases: Iterable[int],
+        subset: Optional[Any] = None,
+    ) -> Any:
         # TODO: finish typing this
         """return a smartset for the given phases"""
         self.loadphaserevs(repo)  # ensure phase's sets are loaded
@@ -506,8 +512,7 @@ class phasecache:
             self._phasesets[phase] = ps
         self._loadedrevslen = len(cl)
 
-    def loadphaserevs(self, repo):
-        # type: (localrepo.localrepository) -> None
+    def loadphaserevs(self, repo: "localrepo.localrepository") -> None:
         """ensure phase information is loaded in the object"""
        if self._phasesets is None:
            try:
@@ -520,8 +525,7 @@ class phasecache:
         self._loadedrevslen = 0
         self._phasesets = None
 
-    def phase(self, repo, rev):
-        # type: (localrepo.localrepository, int) -> int
+    def phase(self, repo: "localrepo.localrepository", rev: int) -> int:
         # We need a repo argument here to be able to build _phasesets
         # if necessary. The repository instance is not stored in
         # phasecache to avoid reference cycles. The changelog instance
@@ -708,8 +712,7 @@ class phasecache:
                 return True
         return False
 
-    def filterunknown(self, repo):
-        # type: (localrepo.localrepository) -> None
+    def filterunknown(self, repo: "localrepo.localrepository") -> None:
         """remove unknown nodes from the phase boundary
 
         Nothing is lost as unknown nodes only hold data for their descendants.
@@ -786,8 +789,7 @@ def registernew(repo, tr, targetphase, r
     repo._phasecache.replace(phcache)
 
 
-def listphases(repo):
-    # type: (localrepo.localrepository) -> Dict[bytes, bytes]
+def listphases(repo: "localrepo.localrepository") -> Dict[bytes, bytes]:
     """List phases root for serialization over pushkey"""
     # Use ordered dictionary so behavior is deterministic.
     keys = util.sortdict()
@@ -818,8 +820,12 @@ def listphases(repo):
     return keys
 
 
-def pushphase(repo, nhex, oldphasestr, newphasestr):
-    # type: (localrepo.localrepository, bytes, bytes, bytes) -> bool
+def pushphase(
+    repo: "localrepo.localrepository",
+    nhex: bytes,
+    oldphasestr: bytes,
+    newphasestr: bytes,
+) -> bool:
     """List phases root for serialization over pushkey"""
     repo = repo.unfiltered()
     with repo.lock():
@@ -966,8 +972,7 @@ def newheads(repo, heads, roots):
     return pycompat.maplist(cl.node, sorted(new_heads))
 
 
-def newcommitphase(ui):
-    # type: (uimod.ui) -> int
+def newcommitphase(ui: "uimod.ui") -> int:
     """helper to get the target phase of new commit
 
     Handle all possible values for the phases.new-commit options.
@@ -982,14 +987,16 b' def newcommitphase(ui):' | |||||
982 | ) |
|
987 | ) | |
983 |
|
988 | |||
984 |
|
989 | |||
985 | def hassecret(repo): |
|
990 | def hassecret(repo: "localrepo.localrepository") -> bool: | |
986 | # type: (localrepo.localrepository) -> bool |
|
|||
987 | """utility function that check if a repo have any secret changeset.""" |
|
991 | """utility function that check if a repo have any secret changeset.""" | |
988 | return bool(repo._phasecache.phaseroots[secret]) |
|
992 | return bool(repo._phasecache.phaseroots[secret]) | |
989 |
|
993 | |||
990 |
|
994 | |||
991 | def preparehookargs(node, old, new): |
|
995 | def preparehookargs( | |
992 | # type: (bytes, Optional[int], Optional[int]) -> Dict[bytes, bytes] |
|
996 | node: bytes, | |
|
997 | old: Optional[int], | |||
|
998 | new: Optional[int], | |||
|
999 | ) -> Dict[bytes, bytes]: | |||
993 | if old is None: |
|
1000 | if old is None: | |
994 | old = b'' |
|
1001 | old = b'' | |
995 | else: |
|
1002 | else: |
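Every phases.py hunk above is the same mechanical rewrite: the Python 2-era `# type:` comment is deleted and equivalent PEP 484 annotations are written inline, with the signature broken onto one parameter per line once it no longer fits. A minimal, self-contained sketch of that rewrite on a hypothetical helper (the body below is illustrative, not the real preparehookargs):

    from typing import Dict, Optional

    # Before the rewrite the hints lived in a comment:
    #     def hookargs(node, old, new):
    #         # type: (bytes, Optional[int], Optional[int]) -> Dict[bytes, bytes]
    # After the rewrite they are inline annotations, one parameter per line:
    def hookargs(
        node: bytes,
        old: Optional[int],
        new: Optional[int],
    ) -> Dict[bytes, bytes]:
        args = {b'node': node}
        if old is not None:
            args[b'oldphase'] = b'%d' % old
        if new is not None:
            args[b'phase'] = b'%d' % new
        return args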
@@ -72,8 +72,7 b' def _bin(bs):' | |||||
72 | return v |
|
72 | return v | |
73 |
|
73 | |||
74 |
|
74 | |||
75 | def _str(v, l): |
|
75 | def _str(v: int, l: int) -> bytes: | |
76 | # type: (int, int) -> bytes |
|
|||
77 | bs = b"" |
|
76 | bs = b"" | |
78 | for p in range(l): |
|
77 | for p in range(l): | |
79 | bs = pycompat.bytechr(v & 255) + bs |
|
78 | bs = pycompat.bytechr(v & 255) + bs |
@@ -59,8 +59,7 b' class cmdstate:' | |||||
59 | self._repo = repo |
|
59 | self._repo = repo | |
60 | self.fname = fname |
|
60 | self.fname = fname | |
61 |
|
61 | |||
62 | def read(self): |
|
62 | def read(self) -> Dict[bytes, Any]: | |
63 | # type: () -> Dict[bytes, Any] |
|
|||
64 | """read the existing state file and return a dict of data stored""" |
|
63 | """read the existing state file and return a dict of data stored""" | |
65 | return self._read() |
|
64 | return self._read() | |
66 |
|
65 |
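`cmdstate.read()` can be annotated as `Dict[bytes, Any]` only because the typing names, and the repository class used in its other signatures, are importable for the checker; the series leans on `typing.TYPE_CHECKING` guards so those imports cost nothing at runtime and cannot create cycles. A small sketch of that guard pattern, with an illustrative stand-in class rather than the real cmdstate:

    import typing
    from typing import Any, Dict

    if typing.TYPE_CHECKING:
        # Only evaluated by the type checker, so no runtime import cycle.
        from mercurial import localrepo

    class statefile:
        def __init__(self, repo: "localrepo.localrepository", fname: bytes) -> None:
            self._repo = repo
            self.fname = fname

        def read(self) -> Dict[bytes, Any]:
            # Stand-in body; the real class deserializes the on-disk state file.
            return {}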
@@ -69,8 +69,7 b' if typing.TYPE_CHECKING:' | |||||
69 | Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] |
|
69 | Substate = Dict[bytes, Tuple[bytes, bytes, bytes]] | |
70 |
|
70 | |||
71 |
|
71 | |||
72 | def state(ctx, ui): |
|
72 | def state(ctx: "context.changectx", ui: "uimod.ui") -> Substate: | |
73 | # type: (context.changectx, uimod.ui) -> Substate |
|
|||
74 | """return a state dict, mapping subrepo paths configured in .hgsub |
|
73 | """return a state dict, mapping subrepo paths configured in .hgsub | |
75 | to tuple: (source from .hgsub, revision from .hgsubstate, kind |
|
74 | to tuple: (source from .hgsub, revision from .hgsubstate, kind | |
76 | (key in types dict)) |
|
75 | (key in types dict)) | |
@@ -122,8 +121,7 b' def state(ctx, ui):' | |||||
122 | except FileNotFoundError: |
|
121 | except FileNotFoundError: | |
123 | pass |
|
122 | pass | |
124 |
|
123 | |||
125 | def remap(src): |
|
124 | def remap(src: bytes) -> bytes: | |
126 | # type: (bytes) -> bytes |
|
|||
127 | for pattern, repl in p.items(b'subpaths'): |
|
125 | for pattern, repl in p.items(b'subpaths'): | |
128 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub |
|
126 | # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub | |
129 | # does a string decode. |
|
127 | # does a string decode. | |
@@ -175,8 +173,7 b' def state(ctx, ui):' | |||||
175 | return state |
|
173 | return state | |
176 |
|
174 | |||
177 |
|
175 | |||
178 | def writestate(repo, state): |
|
176 | def writestate(repo: "localrepo.localrepository", state: Substate) -> None: | |
179 | # type: (localrepo.localrepository, Substate) -> None |
|
|||
180 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" |
|
177 | """rewrite .hgsubstate in (outer) repo with these subrepo states""" | |
181 | lines = [ |
|
178 | lines = [ | |
182 | b'%s %s\n' % (state[s][1], s) |
|
179 | b'%s %s\n' % (state[s][1], s) | |
@@ -186,8 +183,14 b' def writestate(repo, state):' | |||||
186 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') |
|
183 | repo.wwrite(b'.hgsubstate', b''.join(lines), b'') | |
187 |
|
184 | |||
188 |
|
185 | |||
189 | def submerge(repo, wctx, mctx, actx, overwrite, labels=None): |
|
186 | def submerge( | |
190 | # type: (localrepo.localrepository, context.workingctx, context.changectx, context.changectx, bool, Optional[Any]) -> Substate |
|
187 | repo: "localrepo.localrepository", | |
|
188 | wctx: "context.workingctx", | |||
|
189 | mctx: "context.changectx", | |||
|
190 | actx: "context.changectx", | |||
|
191 | overwrite: bool, | |||
|
192 | labels: Optional[Any] = None, | |||
|
193 | ) -> Substate: | |||
191 | # TODO: type the `labels` arg |
|
194 | # TODO: type the `labels` arg | |
192 | """delegated from merge.applyupdates: merging of .hgsubstate file |
|
195 | """delegated from merge.applyupdates: merging of .hgsubstate file | |
193 | in working context, merging context and ancestor context""" |
|
196 | in working context, merging context and ancestor context""" | |
@@ -327,8 +330,13 b' def submerge(repo, wctx, mctx, actx, ove' | |||||
327 | return sm |
|
330 | return sm | |
328 |
|
331 | |||
329 |
|
332 | |||
330 | def precommit(ui, wctx, status, match, force=False): |
|
333 | def precommit( | |
331 | # type: (uimod.ui, context.workingcommitctx, scmutil.status, matchmod.basematcher, bool) -> Tuple[List[bytes], Set[bytes], Substate] |
|
334 | ui: "uimod.ui", | |
|
335 | wctx: "context.workingcommitctx", | |||
|
336 | status: "scmutil.status", | |||
|
337 | match: "matchmod.basematcher", | |||
|
338 | force: bool = False, | |||
|
339 | ) -> Tuple[List[bytes], Set[bytes], Substate]: | |||
332 | """Calculate .hgsubstate changes that should be applied before committing |
|
340 | """Calculate .hgsubstate changes that should be applied before committing | |
333 |
|
341 | |||
334 | Returns (subs, commitsubs, newstate) where |
|
342 | Returns (subs, commitsubs, newstate) where | |
@@ -416,8 +424,7 b' def repo_rel_or_abs_source(repo):' | |||||
416 | return posixpath.normpath(path) |
|
424 | return posixpath.normpath(path) | |
417 |
|
425 | |||
418 |
|
426 | |||
419 | def reporelpath(repo): |
|
427 | def reporelpath(repo: "localrepo.localrepository") -> bytes: | |
420 | # type: (localrepo.localrepository) -> bytes |
|
|||
421 | """return path to this (sub)repo as seen from outermost repo""" |
|
428 | """return path to this (sub)repo as seen from outermost repo""" | |
422 | parent = repo |
|
429 | parent = repo | |
423 | while hasattr(parent, '_subparent'): |
|
430 | while hasattr(parent, '_subparent'): | |
@@ -425,14 +432,16 b' def reporelpath(repo):' | |||||
425 | return repo.root[len(pathutil.normasprefix(parent.root)) :] |
|
432 | return repo.root[len(pathutil.normasprefix(parent.root)) :] | |
426 |
|
433 | |||
427 |
|
434 | |||
428 | def subrelpath(sub): |
|
435 | def subrelpath(sub: "subrepo.abstractsubrepo") -> bytes: | |
429 | # type: (subrepo.abstractsubrepo) -> bytes |
|
|||
430 | """return path to this subrepo as seen from outermost repo""" |
|
436 | """return path to this subrepo as seen from outermost repo""" | |
431 | return sub._relpath |
|
437 | return sub._relpath | |
432 |
|
438 | |||
433 |
|
439 | |||
434 | def _abssource(repo, push=False, abort=True): |
|
440 | def _abssource( | |
435 | # type: (localrepo.localrepository, bool, bool) -> Optional[bytes] |
|
441 | repo: "localrepo.localrepository", | |
|
442 | push: bool = False, | |||
|
443 | abort: bool = True, | |||
|
444 | ) -> Optional[bytes]: | |||
436 | """return pull/push path of repo - either based on parent repo .hgsub info |
|
445 | """return pull/push path of repo - either based on parent repo .hgsub info | |
437 | or on the top repo config. Abort or return None if no source found.""" |
|
446 | or on the top repo config. Abort or return None if no source found.""" | |
438 | if hasattr(repo, '_subparent'): |
|
447 | if hasattr(repo, '_subparent'): | |
@@ -480,8 +489,7 b' def _abssource(repo, push=False, abort=T' | |||||
480 | raise error.Abort(_(b"default path for subrepository not found")) |
|
489 | raise error.Abort(_(b"default path for subrepository not found")) | |
481 |
|
490 | |||
482 |
|
491 | |||
483 | def newcommitphase(ui, ctx): |
|
492 | def newcommitphase(ui: "uimod.ui", ctx: "context.changectx") -> int: | |
484 | # type: (uimod.ui, context.changectx) -> int |
|
|||
485 | commitphase = phases.newcommitphase(ui) |
|
493 | commitphase = phases.newcommitphase(ui) | |
486 | substate = getattr(ctx, "substate", None) |
|
494 | substate = getattr(ctx, "substate", None) | |
487 | if not substate: |
|
495 | if not substate: |
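The long subrepoutil signatures stay readable because `Substate` is a plain alias, `Dict[bytes, Tuple[bytes, bytes, bytes]]`, mapping a subrepo path to its (source, revision, kind) triple, as the `state()` docstring above spells out. A self-contained sketch of the same alias and of the `.hgsubstate` line format used by `writestate()` (the path and values below are made up):

    from typing import Dict, Tuple

    # Mirrors subrepoutil.Substate: subrepo path -> (source, revision, kind).
    Substate = Dict[bytes, Tuple[bytes, bytes, bytes]]

    def substate_lines(state: Substate) -> bytes:
        # One ".hgsubstate"-style line per subrepo: "<revision> <path>\n".
        return b''.join(b'%s %s\n' % (state[s][1], s) for s in sorted(state))

    example: Substate = {b'vendor/lib': (b'https://example.com/lib', b'0' * 40, b'hg')}
    assert substate_lines(example) == b'%s vendor/lib\n' % (b'0' * 40)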
@@ -147,8 +147,7 b' unlink = platform.unlink' | |||||
147 | username = platform.username |
|
147 | username = platform.username | |
148 |
|
148 | |||
149 |
|
149 | |||
150 | def setumask(val): |
|
150 | def setumask(val: int) -> None: | |
151 | # type: (int) -> None |
|
|||
152 | '''updates the umask. used by chg server''' |
|
151 | '''updates the umask. used by chg server''' | |
153 | if pycompat.iswindows: |
|
152 | if pycompat.iswindows: | |
154 | return |
|
153 | return | |
@@ -1850,8 +1849,7 b' if pycompat.ispypy:' | |||||
1850 | nogc = lambda x: x |
|
1849 | nogc = lambda x: x | |
1851 |
|
1850 | |||
1852 |
|
1851 | |||
1853 | def pathto(root, n1, n2): |
|
1852 | def pathto(root: bytes, n1: bytes, n2: bytes) -> bytes: | |
1854 | # type: (bytes, bytes, bytes) -> bytes |
|
|||
1855 | """return the relative path from one place to another. |
|
1853 | """return the relative path from one place to another. | |
1856 | root should use os.sep to separate directories |
|
1854 | root should use os.sep to separate directories | |
1857 | n1 should use os.sep to separate directories |
|
1855 | n1 should use os.sep to separate directories | |
@@ -2062,8 +2060,7 b' def copyfiles(src, dst, hardlink=None, p' | |||||
2062 | _winreservedchars = b':*?"<>|' |
|
2060 | _winreservedchars = b':*?"<>|' | |
2063 |
|
2061 | |||
2064 |
|
2062 | |||
2065 | def checkwinfilename(path): |
|
2063 | def checkwinfilename(path: bytes) -> Optional[bytes]: | |
2066 | # type: (bytes) -> Optional[bytes] |
|
|||
2067 | r"""Check that the base-relative path is a valid filename on Windows. |
|
2064 | r"""Check that the base-relative path is a valid filename on Windows. | |
2068 | Returns None if the path is ok, or a UI string describing the problem. |
|
2065 | Returns None if the path is ok, or a UI string describing the problem. | |
2069 |
|
2066 | |||
@@ -2157,8 +2154,7 b' def makelock(info, pathname):' | |||||
2157 | os.close(ld) |
|
2154 | os.close(ld) | |
2158 |
|
2155 | |||
2159 |
|
2156 | |||
2160 | def readlock(pathname): |
|
2157 | def readlock(pathname: bytes) -> bytes: | |
2161 | # type: (bytes) -> bytes |
|
|||
2162 | try: |
|
2158 | try: | |
2163 | return readlink(pathname) |
|
2159 | return readlink(pathname) | |
2164 | except OSError as why: |
|
2160 | except OSError as why: | |
@@ -2181,8 +2177,7 b' def fstat(fp):' | |||||
2181 | # File system features |
|
2177 | # File system features | |
2182 |
|
2178 | |||
2183 |
|
2179 | |||
2184 | def fscasesensitive(path): |
|
2180 | def fscasesensitive(path: bytes) -> bool: | |
2185 | # type: (bytes) -> bool |
|
|||
2186 | """ |
|
2181 | """ | |
2187 | Return true if the given path is on a case-sensitive filesystem |
|
2182 | Return true if the given path is on a case-sensitive filesystem | |
2188 |
|
2183 | |||
@@ -2286,8 +2281,7 b' re = _re()' | |||||
2286 | _fspathcache = {} |
|
2281 | _fspathcache = {} | |
2287 |
|
2282 | |||
2288 |
|
2283 | |||
2289 | def fspath(name, root): |
|
2284 | def fspath(name: bytes, root: bytes) -> bytes: | |
2290 | # type: (bytes, bytes) -> bytes |
|
|||
2291 | """Get name in the case stored in the filesystem |
|
2285 | """Get name in the case stored in the filesystem | |
2292 |
|
2286 | |||
2293 | The name should be relative to root, and be normcase-ed for efficiency. |
|
2287 | The name should be relative to root, and be normcase-ed for efficiency. | |
@@ -2331,8 +2325,7 b' def fspath(name, root):' | |||||
2331 | return b''.join(result) |
|
2325 | return b''.join(result) | |
2332 |
|
2326 | |||
2333 |
|
2327 | |||
2334 | def checknlink(testfile): |
|
2328 | def checknlink(testfile: bytes) -> bool: | |
2335 | # type: (bytes) -> bool |
|
|||
2336 | '''check whether hardlink count reporting works properly''' |
|
2329 | '''check whether hardlink count reporting works properly''' | |
2337 |
|
2330 | |||
2338 | # testfile may be open, so we need a separate file for checking to |
|
2331 | # testfile may be open, so we need a separate file for checking to | |
@@ -2365,8 +2358,7 b' def checknlink(testfile):' | |||||
2365 | pass |
|
2358 | pass | |
2366 |
|
2359 | |||
2367 |
|
2360 | |||
2368 | def endswithsep(path): |
|
2361 | def endswithsep(path: bytes) -> bool: | |
2369 | # type: (bytes) -> bool |
|
|||
2370 | '''Check path ends with os.sep or os.altsep.''' |
|
2362 | '''Check path ends with os.sep or os.altsep.''' | |
2371 | return bool( # help pytype |
|
2363 | return bool( # help pytype | |
2372 | path.endswith(pycompat.ossep) |
|
2364 | path.endswith(pycompat.ossep) | |
@@ -2375,8 +2367,7 b' def endswithsep(path):' | |||||
2375 | ) |
|
2367 | ) | |
2376 |
|
2368 | |||
2377 |
|
2369 | |||
2378 | def splitpath(path): |
|
2370 | def splitpath(path: bytes) -> List[bytes]: | |
2379 | # type: (bytes) -> List[bytes] |
|
|||
2380 | """Split path by os.sep. |
|
2371 | """Split path by os.sep. | |
2381 | Note that this function does not use os.altsep because this is |
|
2372 | Note that this function does not use os.altsep because this is | |
2382 | an alternative of simple "xxx.split(os.sep)". |
|
2373 | an alternative of simple "xxx.split(os.sep)". | |
@@ -2609,8 +2600,9 b' def tryrmdir(f):' | |||||
2609 | raise |
|
2600 | raise | |
2610 |
|
2601 | |||
2611 |
|
2602 | |||
2612 | def unlinkpath(f, ignoremissing=False, rmdir=True): |
|
2603 | def unlinkpath( | |
2613 | # type: (bytes, bool, bool) -> None |
|
2604 | f: bytes, ignoremissing: bool = False, rmdir: bool = True | |
|
2605 | ) -> None: | |||
2614 | """unlink and remove the directory if it is empty""" |
|
2606 | """unlink and remove the directory if it is empty""" | |
2615 | if ignoremissing: |
|
2607 | if ignoremissing: | |
2616 | tryunlink(f) |
|
2608 | tryunlink(f) | |
@@ -2624,8 +2616,7 b' def unlinkpath(f, ignoremissing=False, r' | |||||
2624 | pass |
|
2616 | pass | |
2625 |
|
2617 | |||
2626 |
|
2618 | |||
2627 | def tryunlink(f): |
|
2619 | def tryunlink(f: bytes) -> None: | |
2628 | # type: (bytes) -> None |
|
|||
2629 | """Attempt to remove a file, ignoring FileNotFoundError.""" |
|
2620 | """Attempt to remove a file, ignoring FileNotFoundError.""" | |
2630 | try: |
|
2621 | try: | |
2631 | unlink(f) |
|
2622 | unlink(f) | |
@@ -2633,8 +2624,9 b' def tryunlink(f):' | |||||
2633 | pass |
|
2624 | pass | |
2634 |
|
2625 | |||
2635 |
|
2626 | |||
2636 | def makedirs(name, mode=None, notindexed=False): |
|
2627 | def makedirs( | |
2637 | # type: (bytes, Optional[int], bool) -> None |
|
2628 | name: bytes, mode: Optional[int] = None, notindexed: bool = False | |
|
2629 | ) -> None: | |||
2638 | """recursive directory creation with parent mode inheritance |
|
2630 | """recursive directory creation with parent mode inheritance | |
2639 |
|
2631 | |||
2640 | Newly created directories are marked as "not to be indexed by |
|
2632 | Newly created directories are marked as "not to be indexed by | |
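Keyword defaults keep their old spelling, with the type slotted between the name and the `=`; `Optional[int] = None` is the usual form when None is the "not supplied" sentinel, as with `mode` in `makedirs()` above. A tiny illustrative sketch, not the util.py implementation (which also handles the Windows not-indexed flag and parent-mode inheritance):

    import os
    from typing import Optional

    def makedirs_sketch(name: bytes, mode: Optional[int] = None) -> None:
        # Create the directory tree, then apply the mode only if one was given.
        os.makedirs(name, exist_ok=True)
        if mode is not None:
            os.chmod(name, mode)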
@@ -2663,20 +2655,17 b' def makedirs(name, mode=None, notindexed' | |||||
2663 | os.chmod(name, mode) |
|
2655 | os.chmod(name, mode) | |
2664 |
|
2656 | |||
2665 |
|
2657 | |||
2666 | def readfile(path): |
|
2658 | def readfile(path: bytes) -> bytes: | |
2667 | # type: (bytes) -> bytes |
|
|||
2668 | with open(path, b'rb') as fp: |
|
2659 | with open(path, b'rb') as fp: | |
2669 | return fp.read() |
|
2660 | return fp.read() | |
2670 |
|
2661 | |||
2671 |
|
2662 | |||
2672 | def writefile(path, text): |
|
2663 | def writefile(path: bytes, text: bytes) -> None: | |
2673 | # type: (bytes, bytes) -> None |
|
|||
2674 | with open(path, b'wb') as fp: |
|
2664 | with open(path, b'wb') as fp: | |
2675 | fp.write(text) |
|
2665 | fp.write(text) | |
2676 |
|
2666 | |||
2677 |
|
2667 | |||
2678 | def appendfile(path, text): |
|
2668 | def appendfile(path: bytes, text: bytes) -> None: | |
2679 | # type: (bytes, bytes) -> None |
|
|||
2680 | with open(path, b'ab') as fp: |
|
2669 | with open(path, b'ab') as fp: | |
2681 | fp.write(text) |
|
2670 | fp.write(text) | |
2682 |
|
2671 | |||
@@ -2837,8 +2826,7 b' def unitcountfn(*unittable):' | |||||
2837 | return go |
|
2826 | return go | |
2838 |
|
2827 | |||
2839 |
|
2828 | |||
2840 | def processlinerange(fromline, toline): |
|
2829 | def processlinerange(fromline: int, toline: int) -> Tuple[int, int]: | |
2841 | # type: (int, int) -> Tuple[int, int] |
|
|||
2842 | """Check that linerange <fromline>:<toline> makes sense and return a |
|
2830 | """Check that linerange <fromline>:<toline> makes sense and return a | |
2843 | 0-based range. |
|
2831 | 0-based range. | |
2844 |
|
2832 | |||
@@ -2897,13 +2885,11 b' class transformingwriter:' | |||||
2897 | _eolre = remod.compile(br'\r*\n') |
|
2885 | _eolre = remod.compile(br'\r*\n') | |
2898 |
|
2886 | |||
2899 |
|
2887 | |||
2900 | def tolf(s): |
|
2888 | def tolf(s: bytes) -> bytes: | |
2901 | # type: (bytes) -> bytes |
|
|||
2902 | return _eolre.sub(b'\n', s) |
|
2889 | return _eolre.sub(b'\n', s) | |
2903 |
|
2890 | |||
2904 |
|
2891 | |||
2905 | def tocrlf(s): |
|
2892 | def tocrlf(s: bytes) -> bytes: | |
2906 | # type: (bytes) -> bytes |
|
|||
2907 | return _eolre.sub(b'\r\n', s) |
|
2893 | return _eolre.sub(b'\r\n', s) | |
2908 |
|
2894 | |||
2909 |
|
2895 | |||
@@ -2926,15 +2912,13 b' def iterfile(fp):' | |||||
2926 | return fp |
|
2912 | return fp | |
2927 |
|
2913 | |||
2928 |
|
2914 | |||
2929 | def iterlines(iterator): |
|
2915 | def iterlines(iterator: Iterable[bytes]) -> Iterator[bytes]: | |
2930 | # type: (Iterable[bytes]) -> Iterator[bytes] |
|
|||
2931 | for chunk in iterator: |
|
2916 | for chunk in iterator: | |
2932 | for line in chunk.splitlines(): |
|
2917 | for line in chunk.splitlines(): | |
2933 | yield line |
|
2918 | yield line | |
2934 |
|
2919 | |||
2935 |
|
2920 | |||
2936 | def expandpath(path): |
|
2921 | def expandpath(path: bytes) -> bytes: | |
2937 | # type: (bytes) -> bytes |
|
|||
2938 | return os.path.expanduser(os.path.expandvars(path)) |
|
2922 | return os.path.expanduser(os.path.expandvars(path)) | |
2939 |
|
2923 | |||
2940 |
|
2924 | |||
@@ -3062,8 +3046,7 b' def timed(func):' | |||||
3062 | ) |
|
3046 | ) | |
3063 |
|
3047 | |||
3064 |
|
3048 | |||
3065 | def sizetoint(s): |
|
3049 | def sizetoint(s: bytes) -> int: | |
3066 | # type: (bytes) -> int |
|
|||
3067 | """Convert a space specifier to a byte count. |
|
3050 | """Convert a space specifier to a byte count. | |
3068 |
|
3051 | |||
3069 | >>> sizetoint(b'30') |
|
3052 | >>> sizetoint(b'30') | |
@@ -3285,8 +3268,7 b' def with_lc_ctype():' | |||||
3285 | yield |
|
3268 | yield | |
3286 |
|
3269 | |||
3287 |
|
3270 | |||
3288 | def _estimatememory(): |
|
3271 | def _estimatememory() -> Optional[int]: | |
3289 | # type: () -> Optional[int] |
|
|||
3290 | """Provide an estimate for the available system memory in Bytes. |
|
3272 | """Provide an estimate for the available system memory in Bytes. | |
3291 |
|
3273 | |||
3292 | If no estimate can be provided on the platform, returns None. |
|
3274 | If no estimate can be provided on the platform, returns None. |
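Several util.py helpers above now advertise `Optional[...]` returns: `checkwinfilename()` returns a message only when something is wrong, and `_estimatememory()` returns None when no estimate is possible. Callers are expected to narrow with an explicit None check; a generic sketch of that contract (the names below are illustrative, not util.py APIs):

    from typing import Optional

    def check_name(name: bytes) -> Optional[bytes]:
        # None means the name is fine; otherwise a short problem description,
        # the same contract the checkwinfilename() docstring describes.
        if b':' in name:
            return b"contains ':'"
        return None

    problem = check_name(b'foo:bar')
    message = b'ok' if problem is None else b'invalid name: ' + problem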
@@ -81,8 +81,7 b' extendeddateformats = defaultdateformats' | |||||
81 | ) |
|
81 | ) | |
82 |
|
82 | |||
83 |
|
83 | |||
84 | def makedate(timestamp=None): |
|
84 | def makedate(timestamp: Optional[float] = None) -> hgdate: | |
85 | # type: (Optional[float]) -> hgdate |
|
|||
86 | """Return a unix timestamp (or the current time) as a (unixtime, |
|
85 | """Return a unix timestamp (or the current time) as a (unixtime, | |
87 | offset) tuple based off the local timezone.""" |
|
86 | offset) tuple based off the local timezone.""" | |
88 | if timestamp is None: |
|
87 | if timestamp is None: | |
@@ -103,8 +102,10 b' def makedate(timestamp=None):' | |||||
103 | return timestamp, tz |
|
102 | return timestamp, tz | |
104 |
|
103 | |||
105 |
|
104 | |||
106 | def datestr(date=None, format=b'%a %b %d %H:%M:%S %Y %1%2'): |
|
105 | def datestr( | |
107 | # type: (Optional[hgdate], bytes) -> bytes |
|
106 | date: Optional[hgdate] = None, | |
|
107 | format: bytes = b'%a %b %d %H:%M:%S %Y %1%2', | |||
|
108 | ) -> bytes: | |||
108 | """represent a (unixtime, offset) tuple as a localized time. |
|
109 | """represent a (unixtime, offset) tuple as a localized time. | |
109 | unixtime is seconds since the epoch, and offset is the time zone's |
|
110 | unixtime is seconds since the epoch, and offset is the time zone's | |
110 | number of seconds away from UTC. |
|
111 | number of seconds away from UTC. | |
@@ -141,14 +142,12 b" def datestr(date=None, format=b'%a %b %d" | |||||
141 | return s |
|
142 | return s | |
142 |
|
143 | |||
143 |
|
144 | |||
144 | def shortdate(date=None): |
|
145 | def shortdate(date: Optional[hgdate] = None) -> bytes: | |
145 | # type: (Optional[hgdate]) -> bytes |
|
|||
146 | """turn (timestamp, tzoff) tuple into iso 8631 date.""" |
|
146 | """turn (timestamp, tzoff) tuple into iso 8631 date.""" | |
147 | return datestr(date, format=b'%Y-%m-%d') |
|
147 | return datestr(date, format=b'%Y-%m-%d') | |
148 |
|
148 | |||
149 |
|
149 | |||
150 | def parsetimezone(s): |
|
150 | def parsetimezone(s: bytes) -> Tuple[Optional[int], bytes]: | |
151 | # type: (bytes) -> Tuple[Optional[int], bytes] |
|
|||
152 | """find a trailing timezone, if any, in string, and return a |
|
151 | """find a trailing timezone, if any, in string, and return a | |
153 | (offset, remainder) pair""" |
|
152 | (offset, remainder) pair""" | |
154 | s = pycompat.bytestr(s) |
|
153 | s = pycompat.bytestr(s) | |
@@ -183,8 +182,11 b' def parsetimezone(s):' | |||||
183 | return None, s |
|
182 | return None, s | |
184 |
|
183 | |||
185 |
|
184 | |||
186 | def strdate(string, format, defaults=None): |
|
185 | def strdate( | |
187 | # type: (bytes, bytes, Optional[Dict[bytes, Tuple[bytes, bytes]]]) -> hgdate |
|
186 | string: bytes, | |
|
187 | format: bytes, | |||
|
188 | defaults: Optional[Dict[bytes, Tuple[bytes, bytes]]] = None, | |||
|
189 | ) -> hgdate: | |||
188 | """parse a localized time string and return a (unixtime, offset) tuple. |
|
190 | """parse a localized time string and return a (unixtime, offset) tuple. | |
189 | if the string cannot be parsed, ValueError is raised.""" |
|
191 | if the string cannot be parsed, ValueError is raised.""" | |
190 | if defaults is None: |
|
192 | if defaults is None: | |
@@ -226,8 +228,11 b' def strdate(string, format, defaults=Non' | |||||
226 | return unixtime, offset |
|
228 | return unixtime, offset | |
227 |
|
229 | |||
228 |
|
230 | |||
229 | def parsedate(date, formats=None, bias=None): |
|
231 | def parsedate( | |
230 | # type: (Union[bytes, hgdate], Optional[Iterable[bytes]], Optional[Dict[bytes, bytes]]) -> hgdate |
|
232 | date: Union[bytes, hgdate], | |
|
233 | formats: Optional[Iterable[bytes]] = None, | |||
|
234 | bias: Optional[Dict[bytes, bytes]] = None, | |||
|
235 | ) -> hgdate: | |||
231 | """parse a localized date/time and return a (unixtime, offset) tuple. |
|
236 | """parse a localized date/time and return a (unixtime, offset) tuple. | |
232 |
|
237 | |||
233 | The date may be a "unixtime offset" string or in one of the specified |
|
238 | The date may be a "unixtime offset" string or in one of the specified | |
@@ -316,8 +321,7 b' def parsedate(date, formats=None, bias=N' | |||||
316 | return when, offset |
|
321 | return when, offset | |
317 |
|
322 | |||
318 |
|
323 | |||
319 | def matchdate(date): |
|
324 | def matchdate(date: bytes) -> Callable[[float], bool]: | |
320 | # type: (bytes) -> Callable[[float], bool] |
|
|||
321 | """Return a function that matches a given date match specifier |
|
325 | """Return a function that matches a given date match specifier | |
322 |
|
326 | |||
323 | Formats include: |
|
327 | Formats include: | |
@@ -346,13 +350,11 b' def matchdate(date):' | |||||
346 | False |
|
350 | False | |
347 | """ |
|
351 | """ | |
348 |
|
352 | |||
349 | def lower(date): |
|
353 | def lower(date: bytes) -> float: | |
350 | # type: (bytes) -> float |
|
|||
351 | d = {b'mb': b"1", b'd': b"1"} |
|
354 | d = {b'mb': b"1", b'd': b"1"} | |
352 | return parsedate(date, extendeddateformats, d)[0] |
|
355 | return parsedate(date, extendeddateformats, d)[0] | |
353 |
|
356 | |||
354 | def upper(date): |
|
357 | def upper(date: bytes) -> float: | |
355 | # type: (bytes) -> float |
|
|||
356 | d = {b'mb': b"12", b'HI': b"23", b'M': b"59", b'S': b"59"} |
|
358 | d = {b'mb': b"12", b'HI': b"23", b'M': b"59", b'S': b"59"} | |
357 | for days in (b"31", b"30", b"29"): |
|
359 | for days in (b"31", b"30", b"29"): | |
358 | try: |
|
360 | try: |
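The dateutil annotations are written against an `hgdate` alias for the (unixtime, offset) pairs the docstrings describe; assuming the alias is the usual `Tuple[float, int]`, a `makedate()`-style function has roughly the shape below (the offset arithmetic is illustrative, not a copy of the module):

    import datetime
    import time
    from typing import Optional, Tuple

    # Assumed alias: (seconds since the epoch, timezone offset in seconds from UTC).
    hgdate = Tuple[float, int]

    def makedate_sketch(timestamp: Optional[float] = None) -> hgdate:
        if timestamp is None:
            timestamp = time.time()
        # Offset: difference between UTC and local wall-clock time for that instant.
        local = datetime.datetime.fromtimestamp(timestamp)
        utc = datetime.datetime.utcfromtimestamp(timestamp)
        offset = int((utc - local).total_seconds())
        return timestamp, offset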
@@ -34,8 +34,7 b' assert [Union]' | |||||
34 | urlreq = urllibcompat.urlreq |
|
34 | urlreq = urllibcompat.urlreq | |
35 |
|
35 | |||
36 |
|
36 | |||
37 | def getport(port): |
|
37 | def getport(port: Union[bytes, int]) -> int: | |
38 | # type: (Union[bytes, int]) -> int |
|
|||
39 | """Return the port for a given network service. |
|
38 | """Return the port for a given network service. | |
40 |
|
39 | |||
41 | If port is an integer, it's returned as is. If it's a string, it's |
|
40 | If port is an integer, it's returned as is. If it's a string, it's | |
@@ -133,8 +132,12 b' class url:' | |||||
133 | _safepchars = b"/!~*'()+:\\" |
|
132 | _safepchars = b"/!~*'()+:\\" | |
134 | _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match |
|
133 | _matchscheme = remod.compile(b'^[a-zA-Z0-9+.\\-]+:').match | |
135 |
|
134 | |||
136 | def __init__(self, path, parsequery=True, parsefragment=True): |
|
135 | def __init__( | |
137 | # type: (bytes, bool, bool) -> None |
|
136 | self, | |
|
137 | path: bytes, | |||
|
138 | parsequery: bool = True, | |||
|
139 | parsefragment: bool = True, | |||
|
140 | ) -> None: | |||
138 | # We slowly chomp away at path until we have only the path left |
|
141 | # We slowly chomp away at path until we have only the path left | |
139 | self.scheme = self.user = self.passwd = self.host = None |
|
142 | self.scheme = self.user = self.passwd = self.host = None | |
140 | self.port = self.path = self.query = self.fragment = None |
|
143 | self.port = self.path = self.query = self.fragment = None | |
@@ -378,8 +381,7 b' class url:' | |||||
378 | return True # POSIX-style |
|
381 | return True # POSIX-style | |
379 | return False |
|
382 | return False | |
380 |
|
383 | |||
381 | def localpath(self): |
|
384 | def localpath(self) -> bytes: | |
382 | # type: () -> bytes |
|
|||
383 | if self.scheme == b'file' or self.scheme == b'bundle': |
|
385 | if self.scheme == b'file' or self.scheme == b'bundle': | |
384 | path = self.path or b'/' |
|
386 | path = self.path or b'/' | |
385 | # For Windows, we need to promote hosts containing drive |
|
387 | # For Windows, we need to promote hosts containing drive | |
@@ -402,23 +404,19 b' class url:' | |||||
402 | ) |
|
404 | ) | |
403 |
|
405 | |||
404 |
|
406 | |||
405 | def hasscheme(path): |
|
407 | def hasscheme(path: bytes) -> bool: | |
406 | # type: (bytes) -> bool |
|
|||
407 | return bool(url(path).scheme) # cast to help pytype |
|
408 | return bool(url(path).scheme) # cast to help pytype | |
408 |
|
409 | |||
409 |
|
410 | |||
410 | def hasdriveletter(path): |
|
411 | def hasdriveletter(path: bytes) -> bool: | |
411 | # type: (bytes) -> bool |
|
|||
412 | return bool(path) and path[1:2] == b':' and path[0:1].isalpha() |
|
412 | return bool(path) and path[1:2] == b':' and path[0:1].isalpha() | |
413 |
|
413 | |||
414 |
|
414 | |||
415 | def urllocalpath(path): |
|
415 | def urllocalpath(path: bytes) -> bytes: | |
416 | # type: (bytes) -> bytes |
|
|||
417 | return url(path, parsequery=False, parsefragment=False).localpath() |
|
416 | return url(path, parsequery=False, parsefragment=False).localpath() | |
418 |
|
417 | |||
419 |
|
418 | |||
420 | def checksafessh(path): |
|
419 | def checksafessh(path: bytes) -> None: | |
421 | # type: (bytes) -> None |
|
|||
422 | """check if a path / url is a potentially unsafe ssh exploit (SEC) |
|
420 | """check if a path / url is a potentially unsafe ssh exploit (SEC) | |
423 |
|
421 | |||
424 | This is a sanity check for ssh urls. ssh will parse the first item as |
|
422 | This is a sanity check for ssh urls. ssh will parse the first item as | |
@@ -435,8 +433,7 b' def checksafessh(path):' | |||||
435 | ) |
|
433 | ) | |
436 |
|
434 | |||
437 |
|
435 | |||
438 | def hidepassword(u): |
|
436 | def hidepassword(u: bytes) -> bytes: | |
439 | # type: (bytes) -> bytes |
|
|||
440 | '''hide user credential in a url string''' |
|
437 | '''hide user credential in a url string''' | |
441 | u = url(u) |
|
438 | u = url(u) | |
442 | if u.passwd: |
|
439 | if u.passwd: | |
@@ -444,8 +441,7 b' def hidepassword(u):' | |||||
444 | return bytes(u) |
|
441 | return bytes(u) | |
445 |
|
442 | |||
446 |
|
443 | |||
447 | def removeauth(u): |
|
444 | def removeauth(u: bytes) -> bytes: | |
448 | # type: (bytes) -> bytes |
|
|||
449 | '''remove all authentication information from a url string''' |
|
445 | '''remove all authentication information from a url string''' | |
450 | u = url(u) |
|
446 | u = url(u) | |
451 | u.user = u.passwd = None |
|
447 | u.user = u.passwd = None |
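Finally, `getport()` above accepts `Union[bytes, int]`; annotating an "either" parameter like that usually pairs with an isinstance() narrow in the body so the checker can follow both branches. A small illustrative sketch, not the urlutil implementation (which, per its docstring, also consults the system service database):

    import socket
    from typing import Union

    def getport_sketch(port: Union[bytes, int]) -> int:
        # Integers pass straight through; byte strings are parsed as a number
        # first and otherwise treated as a service name (e.g. b'https' -> 443).
        if isinstance(port, int):
            return port
        try:
            return int(port)
        except ValueError:
            return socket.getservbyname(port.decode('ascii'))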