##// END OF EJS Templates
hardlink: duplicate hardlink detection for copying files and directories...
Jun Wu -
r31719:456efd1b default
parent child Browse files
Show More
@@ -1,3632 +1,3636 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import errno
23 import errno
24 import gc
24 import gc
25 import hashlib
25 import hashlib
26 import imp
26 import imp
27 import os
27 import os
28 import platform as pyplatform
28 import platform as pyplatform
29 import re as remod
29 import re as remod
30 import shutil
30 import shutil
31 import signal
31 import signal
32 import socket
32 import socket
33 import stat
33 import stat
34 import string
34 import string
35 import subprocess
35 import subprocess
36 import sys
36 import sys
37 import tempfile
37 import tempfile
38 import textwrap
38 import textwrap
39 import time
39 import time
40 import traceback
40 import traceback
41 import zlib
41 import zlib
42
42
43 from . import (
43 from . import (
44 encoding,
44 encoding,
45 error,
45 error,
46 i18n,
46 i18n,
47 osutil,
47 osutil,
48 parsers,
48 parsers,
49 pycompat,
49 pycompat,
50 )
50 )
51
51
# Re-export commonly used pycompat names at util level so callers need a
# single import for both utility helpers and py2/py3 compatibility names.
empty = pycompat.empty
httplib = pycompat.httplib
httpserver = pycompat.httpserver
pickle = pycompat.pickle
queue = pycompat.queue
socketserver = pycompat.socketserver
stderr = pycompat.stderr
stdin = pycompat.stdin
stdout = pycompat.stdout
stringio = pycompat.stringio
urlerr = pycompat.urlerr
urlreq = pycompat.urlreq
xmlrpclib = pycompat.xmlrpclib
65
65
def isatty(fp):
    """Return whether fp is attached to a terminal.

    Objects without a working isatty method are reported as not a tty
    instead of raising.
    """
    try:
        result = fp.isatty()
    except AttributeError:
        return False
    return result
71
71
# glibc determines buffering on first write to stdout - if we replace a TTY
# destined stdout with a pipe destined stdout (e.g. pager), we want line
# buffering
if isatty(stdout):
    # reopen stdout line-buffered before anything writes to it
    stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)

# Pick the per-OS implementation module; the aliases below re-export its
# API under stable util.* names.
if pycompat.osname == 'nt':
    from . import windows as platform
    # wrap stdout with the Windows-specific writer (see platform.winstdout)
    stdout = platform.winstdout(stdout)
else:
    from . import posix as platform
83
83
_ = i18n._

# Stable, platform-independent names for the platform module's API so the
# rest of the codebase can use util.* without caring about the OS.
bindunixsocket = platform.bindunixsocket
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
getpid = os.getpid
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
readpipe = platform.readpipe
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
split = platform.split
sshargs = platform.sshargs
# use osutil's statfiles when it provides one, else the platform fallback
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
username = platform.username
139
139
# Python compatibility

# unique sentinel: distinct from every real attribute/dict value
_notset = object()

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)
148
148
def safehasattr(thing, attr):
    """hasattr implemented via getattr with a unique sentinel default.

    Reports only a genuine attribute miss as False; any other exception
    raised while computing the attribute propagates to the caller.
    """
    sentinel = object()
    return getattr(thing, attr, sentinel) is not sentinel
151
151
def bitsfrom(container):
    """OR together every integer in container and return the result."""
    acc = 0
    for flag in container:
        acc |= flag
    return acc
157
157
# digest algorithms available for content verification, keyed by wire name
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every preference entry must have an implementation
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS
168
168
class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """feed data to every configured hash object"""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            # fix: this previously interpolated the stale loop variable 'k'
            # (left over from the module-level sanity-check loop) instead of
            # the requested 'key', yielding a wrong name in the error message
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
215
215
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        # read through to the underlying handle, hashing as we go
        data = self._fh.read(length)
        self._digester.update(data)
        self._got += len(data)
        return data

    def validate(self):
        # check total size first, then every expected digest value
        expected, actual = self._size, self._got
        if expected != actual:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (expected, actual))
        for name, wanted in self._digests.items():
            computed = self._digester[name]
            if wanted != computed:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, wanted, computed))
247
247
# 'buffer' exists only on Python 2; provide a slicing-based replacement so
# the rest of the code can call buffer() uniformly on both major versions.
try:
    buffer = buffer
except NameError:
    if not pycompat.ispy3:
        def buffer(sliceable, offset=0, length=None):
            # plain slicing copies the data (py2 without builtin buffer)
            if length is not None:
                return sliceable[offset:offset + length]
            return sliceable[offset:]
    else:
        def buffer(sliceable, offset=0, length=None):
            # memoryview gives a zero-copy view on py3
            if length is not None:
                return memoryview(sliceable)[offset:offset + length]
            return memoryview(sliceable)[offset:]
261
261
# pass close_fds=True to subprocess only on POSIX (see popen2/popen3/popen4)
closefds = pycompat.osname == 'posix'

# number of bytes read per os.read() call in bufferedinputpipe._fillbuffer
_chunksize = 4096
265
265
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input
        # list of buffered chunks; collapsed into one element lazily
        self._buffer = []
        self._eof = False
        # total number of buffered bytes across all chunks
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # fill until 'size' bytes are available or the pipe hits EOF
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first newline in the last chunk, -1 if none
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        # include the newline itself in the returned line
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all chunks so slicing below is a single operation
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            # keep the unconsumed remainder as the single buffered chunk
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # read straight from the file descriptor, bypassing any stdio
        # buffering, so polling on the fd stays meaningful
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
359
359
def popen2(cmd, env=None, newlines=False):
    """Run cmd through the shell and return its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    opts = {
        'shell': True,
        'bufsize': -1,
        'close_fds': closefds,
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'universal_newlines': newlines,
        'env': env,
    }
    proc = subprocess.Popen(cmd, **opts)
    return proc.stdin, proc.stdout
370
370
def popen3(cmd, env=None, newlines=False):
    """Like popen4, but drop the Popen object and return only the pipes."""
    pipes = popen4(cmd, env, newlines)
    return pipes[0], pipes[1], pipes[2]
374
374
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Run cmd through the shell; return (stdin, stdout, stderr, Popen)."""
    opts = {
        'shell': True,
        'bufsize': bufsize,
        'close_fds': closefds,
        'stdin': subprocess.PIPE,
        'stdout': subprocess.PIPE,
        'stderr': subprocess.PIPE,
        'universal_newlines': newlines,
        'env': env,
    }
    proc = subprocess.Popen(cmd, **opts)
    return proc.stdin, proc.stdout, proc.stderr, proc
383
383
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # no generated version module: building from a bare checkout
        return 'unknown'
    return __version__.version
391
391
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = '3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = '3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # split once at the first '+' or '-': numeric part vs. extra suffix
    # (fix: raw string - '\+' in a non-raw literal is an invalid escape
    # sequence, deprecated by CPython)
    parts = remod.split(r'[\+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    # keep leading integer components; stop at the first non-numeric one
    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
    # any other n falls through and returns None, matching historical use
460
460
# used by parsedate
defaultdateformats = (
    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
    '%Y-%m-%dT%H:%M',    # without seconds
    '%Y-%m-%dT%H%M%S',   # another awful but legal variant without :
    '%Y-%m-%dT%H%M',     # without seconds
    '%Y-%m-%d %H:%M:%S', # our common legal variant
    '%Y-%m-%d %H:%M',    # without seconds
    '%Y-%m-%d %H%M%S',   # without :
    '%Y-%m-%d %H%M',     # without seconds
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

# superset of defaultdateformats adding year/month-only forms
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
502
502
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        # zero-arg case: a one-element list doubles as the "computed" flag
        results = []
        def wrapper():
            if not results:
                results.append(func())
            return results[0]
        return wrapper
    memo = {}
    if argcount == 1:
        # single-arg case avoids packing/unpacking an args tuple
        def wrapper(arg):
            try:
                return memo[arg]
            except KeyError:
                memo[arg] = func(arg)
                return memo[arg]
    else:
        def wrapper(*args):
            try:
                return memo[args]
            except KeyError:
                memo[args] = func(*args)
                return memo[args]

    return wrapper
528
528
class sortdict(dict):
    '''a simple sorted dictionary

    Keys keep insertion order; assigning to an existing key moves it to
    the end of the order.
    '''
    def __init__(self, data=None):
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        # move-to-end semantics when re-assigning an existing key
        if key in self:
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        if isinstance(src, dict):
            src = src.iteritems()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        # fix: return the popped value; it was previously discarded so
        # sortdict.pop() always evaluated to None, unlike dict.pop()
        value = dict.pop(self, key, *args, **kwargs)
        try:
            self._list.remove(key)
        except ValueError:
            # key was absent and dict.pop returned the supplied default
            pass
        return value
    def keys(self):
        return self._list[:]
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
    def __repr__(self):
        if not self:
            return '%s()' % self.__class__.__name__
        return '%s(%r)' % (self.__class__.__name__, self.items())
577
577
class _lrucachenode(object):
    """One entry of lrucachedict's circular doubly linked list.

    Each node carries its key/value pair plus references to the nodes on
    either side of it.
    """
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        # neighbour links; wired up by the owning cache
        self.prev = None
        self.next = None
        # _notset flags a node that currently holds no entry
        self.key = _notset
        self.value = None

    def markempty(self):
        """Flag this node as holding no entry."""
        self.key = _notset
596
596
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        # key -> _lrucachenode; the nodes also form a circular list.
        self._cache = {}

        # A single empty node, linked to itself in both directions.
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        # _size counts allocated nodes (>= number of live entries),
        # grown lazily up to _capacity by _addcapacity().
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        # A read counts as an access: promote the node to head.
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # NOTE: unlike __getitem__, a get() hit does NOT promote the node.
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # Walk the ring emptying nodes; the node ring itself is kept so
        # the allocated capacity is reused.
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
755
755
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    # Keeps at most ~20 most recently used results; eviction order is
    # tracked in a deque of keys (oldest on the left).
    results = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg in results:
                # cache hit: refresh recency below
                order.remove(arg)
            else:
                if len(results) > 20:
                    del results[order.popleft()]
                results[arg] = func(arg)
            order.append(arg)
            return results[arg]
    else:
        def f(*args):
            if args in results:
                order.remove(args)
            else:
                if len(results) > 20:
                    del results[order.popleft()]
                results[args] = func(*args)
            order.append(args)
            return results[args]
    return f
782
782
class propertycache(object):
    """Descriptor computing an attribute value once per instance.

    On first access the wrapped function is called and its result is
    stored in the instance __dict__ under the function's name, so later
    lookups bypass the descriptor entirely.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
795
795
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # closefds is a platform-dependent module global defined elsewhere
    # in this file.
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    # stderr is not redirected, so perr is always None; only stdout
    # is returned.
    return pout
802
802
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        # The command writes the output file itself; only the name is
        # needed, so close the descriptor immediately.
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # OpenVMS encodes success in the low bit of the exit status.
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # Best-effort cleanup of both temporary files.
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
836
836
# Map of filter-spec prefixes to their implementations; consulted by
# filter() below.
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
841
841
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # A recognized prefix ('tempfile:', 'pipe:') selects an explicit
    # strategy; anything else is run as a plain pipe command.
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return fn(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
848
848
def binary(s):
    """return true if a string is binary data"""
    # Presence of a NUL byte is the heuristic; empty/None input is not
    # considered binary.
    if not s:
        return False
    return '\0' in s
852
852
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # floor(log2(x)) for positive x; 0 when x is 0
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    pending = []
    pendinglen = 0
    for chunk in source:
        pending.append(chunk)
        pendinglen += len(chunk)
        if pendinglen >= min:
            if min < max:
                # Grow the threshold: at least double it, or jump to the
                # largest power of two not exceeding what was just
                # accumulated, capped at max.
                min = min << 1
                nmin = 1 << log2(pendinglen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(pending)
            pendinglen = 0
            pending = []
    if pending:
        # Flush whatever is left, even if below the threshold.
        yield ''.join(pending)
883
883
# Backwards-compatible alias: util.Abort was the historical name; the
# class itself now lives in the error module.
Abort = error.Abort
885
885
def always(fn):
    """Matcher predicate accepting every filename (argument is ignored)."""
    return True
888
888
def never(fn):
    """Matcher predicate rejecting every filename (argument is ignored)."""
    return False
891
891
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    # On 2.7+ the workaround is unnecessary: return func undecorated.
    if sys.version_info >= (2, 7):
        return func

    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # Only re-enable if it was on; never turn GC on behind the
            # caller's back.
            if wasenabled:
                gc.enable()
    return wrapper
915
915
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # On Windows a path on a different drive cannot be made relative;
        # fall back to an absolute path under root.
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # Strip the common leading components of both paths.
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # Climb out of what remains of n1, then descend into n2.
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
941
941
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):      # new py2exe
        return True
    if safehasattr(sys, "importers"):   # old py2exe
        return True
    return imp.is_frozen(u"__main__")   # tools/freeze
951
951
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

i18n.setdatapath(datapath)

# Cached path of the 'hg' executable; lazily filled in by
# hgexecutable() below.
_hgexecutable = None
962
962
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    # Lazily computed once and cached in the module global.
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # Running from an 'hg' script: use that script itself.
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # Last resort: search PATH, falling back to argv[0]'s name.
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
986
986
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path
991
991
def _isstdout(f):
    # Truthy only when f has a fileno() matching the process's original
    # stdout; falsy (possibly None, as in the original) otherwise.
    getfd = getattr(f, 'fileno', None)
    if getfd:
        return getfd() == sys.__stdout__.fileno()
    return getfd
995
995
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # None/False -> '0', True -> '1', everything else via str().
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    env = dict(encoding.environ)
    if environ:
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
    # Always expose the path of the running hg to child commands.
    env['HG'] = hgexecutable()
    return env
1010
1010
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # Flush our own stdout first so child output is not interleaved with
    # buffered output of ours.
    try:
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
                                            and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = shellenviron(environ)
        if out is None or _isstdout(out):
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            # Stream the child's combined stdout/stderr line by line into
            # the caller-provided writer.
            for line in iter(proc.stdout.readline, ''):
                out.write(line)
            proc.wait()
            rc = proc.returncode
    # OpenVMS encodes success in the low bit of the exit status.
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        rc = 0
    return rc
1045
1045
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # A traceback of depth 1 means the TypeError came from the
            # call itself (bad arguments), not from inside func; anything
            # deeper is a genuine error and is re-raised unchanged.
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
1057
1057
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = set([
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
])
1072
1072
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            oldstat = checkambig and filestat(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # Recreate the symlink rather than copying its target.
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1124
1124
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        # When the caller expressed no preference, decide per level from
        # the device numbers of src and dst's parent (hardlinks cannot
        # cross devices). NOTE(review): this detection was moved below
        # the isdir() split by the "duplicate hardlink detection" change
        # this diff records — keep it here, not at function top.
        if hardlink is None:
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # Fall back (and stay) on plain copies from here on.
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1160
1164
# device names (sans extension) that Windows reserves in every directory
_winreservednames = '''con prn aux nul
                       com1 com2 com3 com4 com5 com6 com7 com8 com9
                       lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
# characters that may not appear anywhere in a Windows filename
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for part in path.replace('\\', '/').split('/'):
        if not part:
            continue
        for ch in pycompat.bytestr(part):
            if ch in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % ch
            if ord(ch) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % ch
        stem = part.split('.')[0]
        if stem and stem.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % stem
        last = part[-1]
        # 'part not in ".."' lets '.' and '..' through (they are
        # substrings of '..') while rejecting other trailing dot/space
        if last in '. ' and part not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % last
1211
1215
# pick the platform-appropriate filename checker and the best timer:
# on Windows only checkwinfilename() knows the naming rules and
# time.clock has the highest resolution; elsewhere defer to the
# platform module and time.time.
if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

# Python >= 3.3 provides a monotonic high-resolution counter; prefer it
if safehasattr(time, "perf_counter"):
    timer = time.perf_counter
1221
1225
def makelock(info, pathname):
    """Create a lock file at ``pathname`` carrying ``info``.

    A symlink whose target is ``info`` is preferred (created atomically).
    If the platform has no ``os.symlink``, or symlinking fails with a
    non-EEXIST error, fall back to exclusively creating a regular file
    containing ``info``.  Raises OSError(EEXIST) if the lock exists.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        # only an existing lock is fatal; other failures use the fallback
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL
    fd = os.open(pathname, flags)
    os.write(fd, info)
    os.close(fd)
1234
1238
def readlock(pathname):
    """Return the info stored in the lock at ``pathname``.

    Reads the symlink target when the lock is a symlink; otherwise
    (regular-file fallback, or platforms without readlink) reads the
    file's content.
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: readlink unsupported here
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    try:
        return fp.read()
    finally:
        fp.close()
1247
1251
def fstat(fp):
    """Stat a file object that may not have a fileno() method.

    Falls back to stat'ing ``fp.name`` when ``fileno`` is unavailable.
    """
    fileno = getattr(fp, 'fileno', None)
    if fileno is None:
        return os.stat(fp.name)
    return os.fstat(fileno())
1254
1258
1255 # File system features
1259 # File system features
1256
1260
def fscasesensitive(path):
    """Return True if ``path`` is on a case-sensitive filesystem.

    Requires a path (like /foo/.hg) ending with a final component whose
    case can be folded; when the name has no upper/lower variant there is
    no way to probe, so case sensitivity is assumed.
    """
    origstat = os.lstat(path)
    dirname, base = os.path.split(path)
    folded = base.upper()
    if folded == base:
        folded = base.lower()
    if folded == base:
        # no evidence against case sensitivity
        return True
    try:
        otherstat = os.lstat(os.path.join(dirname, folded))
    except OSError:
        # folded spelling does not exist => case-sensitive
        return True
    # identical stat under the folded spelling => same file => insensitive
    return otherstat != origstat
1279
1283
# Optional google-re2 bindings.  _re2 is tri-state:
#   None  -> re2 importable but not yet validated (see _re._checkre2)
#   False -> re2 unavailable
#   True  -> re2 validated and usable
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
1285
1289
class _re(object):
    """Facade over the stdlib ``re`` module that prefers ``re2``.

    Falls back to the standard module whenever re2 is missing, fails
    validation, or cannot handle the requested pattern/flags.
    """
    def _checkre2(self):
        # validate that the re2 binding actually matches; see issue3964
        global _re2
        try:
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        usere2 = _re2 and not (flags & ~(remod.IGNORECASE | remod.MULTILINE))
        if usere2:
            # re2 takes flags inline rather than as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        return remod.escape

re = _re()
1330
1334
# cache of directory -> {normcased name: on-disk name}, shared across calls
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normcased entries to their on-disk spelling
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # Fix: str.replace() returns a new string; the original discarded the
    # result, so '\\' was never escaped inside the character class below
    # and backslash separators were mis-tokenized on Windows.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1373
1377
def getfstype(dirpath):
    '''Get the filesystem type name from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    '''
    # osutil may not expose getfstype (e.g. pure builds); be tolerant
    impl = getattr(osutil, 'getfstype', None)
    if impl is None:
        return None
    return impl(dirpath)
1380
1384
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so probe with separate scratch files to work
    # around issue2543 (otherwise testfile may get lost on Samba shares)
    probe = testfile + ".hgtmp1"
    if os.path.lexists(probe):
        return False
    try:
        posixfile(probe, 'w').close()
    except IOError:
        # cannot even create the probe file; clean up best-effort
        try:
            os.unlink(probe)
        except OSError:
            pass
        return False

    link = testfile + ".hgtmp2"
    held = None
    try:
        oslink(probe, link)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open, so hold it open while counting
        held = posixfile(link)
        return nlinks(link) > 1
    except OSError:
        return False
    finally:
        if held is not None:
            held.close()
        for leftover in (probe, link):
            try:
                os.unlink(leftover)
            except OSError:
                pass
1416
1420
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    # osaltsep may be empty/None on platforms with a single separator
    return pycompat.osaltsep and path.endswith(pycompat.osaltsep)
1421
1425
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if need.'''
    sep = pycompat.ossep
    return path.split(sep)
1429
1433
def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform != 'darwin':
        return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in encoding.environ:
        # handle SSH access to a box where the user is logged in
        return False
    if getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
1444
1448
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    directory, basename = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % basename, dir=directory)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # nothing to copy; the empty temp file is the result
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # don't leave a half-written temp file behind
        try:
            os.unlink(temp)
        except OSError:
            pass
        raise
    return temp
1483
1487
1484 class filestat(object):
1488 class filestat(object):
1485 """help to exactly detect change of a file
1489 """help to exactly detect change of a file
1486
1490
1487 'stat' attribute is result of 'os.stat()' if specified 'path'
1491 'stat' attribute is result of 'os.stat()' if specified 'path'
1488 exists. Otherwise, it is None. This can avoid preparative
1492 exists. Otherwise, it is None. This can avoid preparative
1489 'exists()' examination on client side of this class.
1493 'exists()' examination on client side of this class.
1490 """
1494 """
1491 def __init__(self, path):
1495 def __init__(self, path):
1492 try:
1496 try:
1493 self.stat = os.stat(path)
1497 self.stat = os.stat(path)
1494 except OSError as err:
1498 except OSError as err:
1495 if err.errno != errno.ENOENT:
1499 if err.errno != errno.ENOENT:
1496 raise
1500 raise
1497 self.stat = None
1501 self.stat = None
1498
1502
1499 __hash__ = object.__hash__
1503 __hash__ = object.__hash__
1500
1504
1501 def __eq__(self, old):
1505 def __eq__(self, old):
1502 try:
1506 try:
1503 # if ambiguity between stat of new and old file is
1507 # if ambiguity between stat of new and old file is
1504 # avoided, comparison of size, ctime and mtime is enough
1508 # avoided, comparison of size, ctime and mtime is enough
1505 # to exactly detect change of a file regardless of platform
1509 # to exactly detect change of a file regardless of platform
1506 return (self.stat.st_size == old.stat.st_size and
1510 return (self.stat.st_size == old.stat.st_size and
1507 self.stat.st_ctime == old.stat.st_ctime and
1511 self.stat.st_ctime == old.stat.st_ctime and
1508 self.stat.st_mtime == old.stat.st_mtime)
1512 self.stat.st_mtime == old.stat.st_mtime)
1509 except AttributeError:
1513 except AttributeError:
1510 return False
1514 return False
1511
1515
1512 def isambig(self, old):
1516 def isambig(self, old):
1513 """Examine whether new (= self) stat is ambiguous against old one
1517 """Examine whether new (= self) stat is ambiguous against old one
1514
1518
1515 "S[N]" below means stat of a file at N-th change:
1519 "S[N]" below means stat of a file at N-th change:
1516
1520
1517 - S[n-1].ctime < S[n].ctime: can detect change of a file
1521 - S[n-1].ctime < S[n].ctime: can detect change of a file
1518 - S[n-1].ctime == S[n].ctime
1522 - S[n-1].ctime == S[n].ctime
1519 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1523 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1520 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1524 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1521 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1525 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1522 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1526 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1523
1527
1524 Case (*2) above means that a file was changed twice or more at
1528 Case (*2) above means that a file was changed twice or more at
1525 same time in sec (= S[n-1].ctime), and comparison of timestamp
1529 same time in sec (= S[n-1].ctime), and comparison of timestamp
1526 is ambiguous.
1530 is ambiguous.
1527
1531
1528 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1532 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1529 timestamp is ambiguous".
1533 timestamp is ambiguous".
1530
1534
1531 But advancing mtime only in case (*2) doesn't work as
1535 But advancing mtime only in case (*2) doesn't work as
1532 expected, because naturally advanced S[n].mtime in case (*1)
1536 expected, because naturally advanced S[n].mtime in case (*1)
1533 might be equal to manually advanced S[n-1 or earlier].mtime.
1537 might be equal to manually advanced S[n-1 or earlier].mtime.
1534
1538
1535 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1539 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1536 treated as ambiguous regardless of mtime, to avoid overlooking
1540 treated as ambiguous regardless of mtime, to avoid overlooking
1537 by confliction between such mtime.
1541 by confliction between such mtime.
1538
1542
1539 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1543 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1540 S[n].mtime", even if size of a file isn't changed.
1544 S[n].mtime", even if size of a file isn't changed.
1541 """
1545 """
1542 try:
1546 try:
1543 return (self.stat.st_ctime == old.stat.st_ctime)
1547 return (self.stat.st_ctime == old.stat.st_ctime)
1544 except AttributeError:
1548 except AttributeError:
1545 return False
1549 return False
1546
1550
1547 def avoidambig(self, path, old):
1551 def avoidambig(self, path, old):
1548 """Change file stat of specified path to avoid ambiguity
1552 """Change file stat of specified path to avoid ambiguity
1549
1553
1550 'old' should be previous filestat of 'path'.
1554 'old' should be previous filestat of 'path'.
1551
1555
1552 This skips avoiding ambiguity, if a process doesn't have
1556 This skips avoiding ambiguity, if a process doesn't have
1553 appropriate privileges for 'path'.
1557 appropriate privileges for 'path'.
1554 """
1558 """
1555 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1559 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1556 try:
1560 try:
1557 os.utime(path, (advanced, advanced))
1561 os.utime(path, (advanced, advanced))
1558 except OSError as inst:
1562 except OSError as inst:
1559 if inst.errno == errno.EPERM:
1563 if inst.errno == errno.EPERM:
1560 # utime() on the file created by another user causes EPERM,
1564 # utime() on the file created by another user causes EPERM,
1561 # if a process doesn't have appropriate privileges
1565 # if a process doesn't have appropriate privileges
1562 return
1566 return
1563 raise
1567 raise
1564
1568
1565 def __ne__(self, other):
1569 def __ne__(self, other):
1566 return not self == other
1570 return not self == other
1567
1571
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # expose the underlying file's methods directly
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        """Flush and atomically rename the temp file over the target."""
        if self._fp.closed:
            return
        self._fp.close()
        filename = localpath(self.__name)
        oldstat = self._checkambig and filestat(filename)
        if oldstat and oldstat.stat:
            rename(self._tempname, filename)
            newstat = filestat(filename)
            if newstat.isambig(oldstat):
                # stat of changed file is ambiguous to original one
                advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                os.utime(filename, (advanced, advanced))
        else:
            rename(self._tempname, filename)

    def discard(self):
        """Throw away all writes; remove the temp file."""
        if self._fp.closed:
            return
        try:
            os.unlink(self._tempname)
        except OSError:
            pass
        self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        if exctype is not None:
            self.discard()
        else:
            self.close()
1630
1634
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
1642
1646
def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as err:
        if err.errno == errno.ENOENT:
            return
        raise
1650
1654
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # a parent is missing: create it first, then retry ourselves
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)
1678
1682
def readfile(path):
    """Return the entire contents of ``path``, read in binary mode."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
1682
1686
def writefile(path, text):
    """Overwrite ``path`` with ``text``, written in binary mode."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
1686
1690
def appendfile(path, text):
    """Append ``text`` to ``path``, creating the file if needed."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
1690
1694
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # Chunks larger than 1MB are re-yielded in 256KB slices so
            # read() never has to hold on to very large strings at once.
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        # _queue buffers chunks pulled from the iterator; _chunkoffset
        # records how much of _queue[0] has already been consumed.
        self._queue = collections.deque()
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    # iterator exhausted: return whatever we gathered
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
1771
1775
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        # Never request more than the remaining limit in one read.
        nbytes = size if limit is None else min(limit, size)
        data = nbytes and f.read(nbytes)
        if not data:
            break
        if limit:
            limit -= len(data)
        yield data
1792
1796
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        raise Abort(_("negative timestamp: %d") % timestamp,
                    hint=_("check your clock"))
    # The local offset is the difference between the UTC and the local
    # wall-clock rendering of the same instant, in seconds.
    utcwall = datetime.datetime.utcfromtimestamp(timestamp)
    localwall = datetime.datetime.fromtimestamp(timestamp)
    delta = utcwall - localwall
    return timestamp, delta.days * 86400 + delta.seconds
1805
1809
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # %1/%2 (and the %z alias) stand for the numeric UTC offset,
        # which strftime knows nothing about; expand them textually.
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        # %z must be rewritten first so the %1/%2 substitutions below
        # also cover it.
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    # Shift epoch seconds by the offset so the naive datetime below
    # renders the intended wall-clock time, clamped to signed 32 bits.
    d = t - tz
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
    return s
1841
1845
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date."""
    # Delegates to datestr with a fixed day-only format; the time and
    # timezone components are intentionally dropped.
    return datestr(date, format='%Y-%m-%d')
1845
1849
def parsetimezone(s):
    """find a trailing timezone, if any, in string, and return a
    (offset, remainder) pair"""

    # Named UTC aliases.
    if s.endswith(("GMT", "UTC")):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = 1 if s[-5] == "+" else -1
        seconds = (int(s[-4:-2]) * 60 + int(s[-2:])) * 60
        return -sign * seconds, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = 1 if s[-6] == "+" else -1
        seconds = (int(s[-5:-3]) * 60 + int(s[-2:])) * 60
        return -sign * seconds, s[:-6]

    # No recognizable timezone suffix.
    return None, s
1873
1877
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # Fill in any element missing from ``format`` using ``defaults``.
    # Once some element has been given explicitly, less specific missing
    # elements are taken relative to today instead of the biased value.
    usenow = False
    for part in ("S", "M", "HI", "d", "mb", "yY"):  # decreasing specificity
        if any(("%" + p) in format for p in part):
            usenow = True
        else:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1904
1908
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # Symbolic dates are rewritten to concrete ones before parsing.
    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fast path: a plain "unixtime offset" pair of integers
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format in order until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1981
1985
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest timestamp the (possibly partial) date can denote:
        # unspecified fields are biased to their minimum values
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp: bias unspecified fields to their maxima,
        # trying month lengths from 31 days down until one parses
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        # on or before the given date
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        # on or after the given date
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # explicit inclusive range "A to B"
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # a single (partial) date matches the full span it covers
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
2057
2061
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        flags = 0 if casesensitive else remod.I
        try:
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search

    # An explicit 'literal:' prefix is stripped; any other prefix is
    # simply part of the literal pattern.
    if pattern.startswith('literal:'):
        pattern = pattern[8:]

    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2116
2120
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # Drop the domain part, if any.
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    # Keep only what follows an opening angle bracket.
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    # Truncate at the first space, then at the first dot.
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx >= 0:
            user = user[:idx]
    return user
2132
2136
def emailuser(user):
    """Return the user portion of an email address."""
    # Strip the domain, then anything preceding an angle bracket.
    at = user.find('@')
    if at >= 0:
        user = user[:at]
    lt = user.find('<')
    if lt >= 0:
        user = user[lt + 1:]
    return user
2142
2146
def email(author):
    '''get email of author.'''
    # Take everything between '<' and '>'; when the brackets are
    # absent the whole string is treated as the address.
    end = author.find('>')
    start = author.find('<') + 1
    if end == -1:
        return author[start:]
    return author[start:end]
2149
2153
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # Trimming is delegated to encoding.trim, which is encoding-aware;
    # '...' is the suffix used when trimming occurs (see encoding module).
    return encoding.trim(text, maxlength, ellipsis='...')
2153
2157
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        # Walk units from largest to smallest; the first threshold met
        # decides the format used.
        for multiplier, divisor, fmt in unittable:
            if count >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # Below every threshold: fall back to the smallest unit's format.
        return unittable[-1][2] % count

    return go
2164
2168
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    # Reject an inverted range before validating the lower bound, so the
    # error messages stay consistent with the doctests above.
    if toline < fromline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    return fromline - 1, toline
2185
2189
# human-readable byte count renderer built on unitcountfn(): rows run from
# the largest unit (binary GB, 1 << 30) down to plain bytes, and the first
# row whose threshold the count reaches also picks the precision (0, 1 or
# 2 decimal places depending on magnitude within the unit)
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2198
2202
def escapestr(s):
    # codecs.escape_encode is the C routine that backs
    # s.encode('string_escape'); calling it directly keeps this working on
    # Python 3, where the 'string_escape' codec name no longer exists
    encoded, _consumed = codecs.escape_encode(s)
    return encoded
2203
2207
def unescapestr(s):
    # inverse of escapestr(): interpret backslash escape sequences in s
    decoded, _consumed = codecs.escape_decode(s)
    return decoded
2206
2210
def uirepr(s):
    # repr() doubles every backslash, which makes Windows paths hard to
    # read in user-facing output; collapse the pairs back to singles
    quoted = repr(s)
    return quoted.replace('\\\\', '\\')
2210
2214
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Construct a terminal-column-aware TextWrapper.

    This is a factory: on first call it defines the subclass and then
    rebinds the module-level name MBTextWrapper to that class (see the
    'global' statement below), so later calls instantiate the class
    directly without re-running the class body.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # split ucstr at the largest prefix whose display width still
            # fits in space_left; returns (head, remainder)
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            # at least one column must remain available for the cut piece
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                # cannot break the word: put it on a line of its own
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    # NOTE: width is measured in display columns, not len()
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # memoize: replace this factory with the class itself so the class body
    # above is only executed once per process
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2314
2318
def wrap(line, width, initindent='', hangindent=''):
    """Wrap line to at most width display columns.

    initindent prefixes the first output line, hangindent every subsequent
    one. Inputs are local-encoding byte strings; the result is re-encoded
    the same way. An absurdly small width is widened past the indents.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    codec = pycompat.sysstr(encoding.encoding)
    errmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(codec, errmode)
    initindent = initindent.decode(codec, errmode)
    hangindent = hangindent.decode(codec, errmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(codec)
2330
2334
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #                | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            # sentinel-style iter: stops at EOF (readline returns '')
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            # read raw chunks from the fd and re-split them into lines,
            # carrying a partial trailing line over between chunks
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            # incomplete final line: keep for next chunk
                            line = l
                if not buf:
                    break
            # flush any unterminated final line
            if line:
                yield line

    def iterfile(fp):
        """Return an iterator of lines over fp that is EINTR-safe.

        Regular (S_ISREG) files take the fast path and iterate fp
        directly; pipes/sockets/ttys go through _safeiterfile.
        """
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2402
2406
def iterlines(iterator):
    """Yield every line of every chunk produced by iterator.

    Line terminators are stripped (str.splitlines semantics); a chunk
    boundary in the middle of a line yields the two halves separately.
    """
    for chunk in iterator:
        lines = chunk.splitlines()
        for ln in lines:
            yield ln
2407
2411
def expandpath(path):
    """Expand environment variables in path, then a leading ~[user]."""
    withvars = os.path.expandvars(path)
    return os.path.expanduser(withvars)
2410
2414
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2425
2429
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()  # (pid, status) pairs reaped by the SIGCHLD handler
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD is missing on platforms without it (e.g. Windows)
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # the child appears gone: re-check condfn() once more to close
            # the race where it satisfied the condition just before exiting
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # always restore the previous SIGCHLD disposition
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2460
2464
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    if not fn:
        fn = lambda s: s
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        # strip the regex-escaping backslash to get the literal prefix char
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(r'%s(%s)' % (prefix, patterns))
    # m.group() is prefix + key; drop the (one-char) literal prefix
    return matcher.sub(lambda m: fn(mapping[m.group()[1:]]), s)
2485
2489
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        # plain numbers (or numeric strings) are taken at face value
        return int(port)
    except ValueError:
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2502
2506
# recognized spellings of boolean config values, mapped to their meaning
_booleans = dict.fromkeys(('1', 'yes', 'true', 'on', 'always'), True)
_booleans.update(dict.fromkeys(('0', 'no', 'false', 'off', 'never'), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower())
2513
2517
# lookup table from every two-hex-digit string (both letter cases) to the
# character with that ordinal, used for fast %XX unescaping
_hextochr = {hi + lo: chr(int(hi + lo, 16))
             for hi in string.hexdigits for lo in string.hexdigits}
2516
2520
2517 class url(object):
2521 class url(object):
2518 r"""Reliable URL parser.
2522 r"""Reliable URL parser.
2519
2523
2520 This parses URLs and provides attributes for the following
2524 This parses URLs and provides attributes for the following
2521 components:
2525 components:
2522
2526
2523 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2527 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2524
2528
2525 Missing components are set to None. The only exception is
2529 Missing components are set to None. The only exception is
2526 fragment, which is set to '' if present but empty.
2530 fragment, which is set to '' if present but empty.
2527
2531
2528 If parsefragment is False, fragment is included in query. If
2532 If parsefragment is False, fragment is included in query. If
2529 parsequery is False, query is included in path. If both are
2533 parsequery is False, query is included in path. If both are
2530 False, both fragment and query are included in path.
2534 False, both fragment and query are included in path.
2531
2535
2532 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2536 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2533
2537
2534 Note that for backward compatibility reasons, bundle URLs do not
2538 Note that for backward compatibility reasons, bundle URLs do not
2535 take host names. That means 'bundle://../' has a path of '../'.
2539 take host names. That means 'bundle://../' has a path of '../'.
2536
2540
2537 Examples:
2541 Examples:
2538
2542
2539 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2543 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2540 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2544 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2541 >>> url('ssh://[::1]:2200//home/joe/repo')
2545 >>> url('ssh://[::1]:2200//home/joe/repo')
2542 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2546 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2543 >>> url('file:///home/joe/repo')
2547 >>> url('file:///home/joe/repo')
2544 <url scheme: 'file', path: '/home/joe/repo'>
2548 <url scheme: 'file', path: '/home/joe/repo'>
2545 >>> url('file:///c:/temp/foo/')
2549 >>> url('file:///c:/temp/foo/')
2546 <url scheme: 'file', path: 'c:/temp/foo/'>
2550 <url scheme: 'file', path: 'c:/temp/foo/'>
2547 >>> url('bundle:foo')
2551 >>> url('bundle:foo')
2548 <url scheme: 'bundle', path: 'foo'>
2552 <url scheme: 'bundle', path: 'foo'>
2549 >>> url('bundle://../foo')
2553 >>> url('bundle://../foo')
2550 <url scheme: 'bundle', path: '../foo'>
2554 <url scheme: 'bundle', path: '../foo'>
2551 >>> url(r'c:\foo\bar')
2555 >>> url(r'c:\foo\bar')
2552 <url path: 'c:\\foo\\bar'>
2556 <url path: 'c:\\foo\\bar'>
2553 >>> url(r'\\blah\blah\blah')
2557 >>> url(r'\\blah\blah\blah')
2554 <url path: '\\\\blah\\blah\\blah'>
2558 <url path: '\\\\blah\\blah\\blah'>
2555 >>> url(r'\\blah\blah\blah#baz')
2559 >>> url(r'\\blah\blah\blah#baz')
2556 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2560 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2557 >>> url(r'file:///C:\users\me')
2561 >>> url(r'file:///C:\users\me')
2558 <url scheme: 'file', path: 'C:\\users\\me'>
2562 <url scheme: 'file', path: 'C:\\users\\me'>
2559
2563
2560 Authentication credentials:
2564 Authentication credentials:
2561
2565
2562 >>> url('ssh://joe:xyz@x/repo')
2566 >>> url('ssh://joe:xyz@x/repo')
2563 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2567 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2564 >>> url('ssh://joe@x/repo')
2568 >>> url('ssh://joe@x/repo')
2565 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2569 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2566
2570
2567 Query strings and fragments:
2571 Query strings and fragments:
2568
2572
2569 >>> url('http://host/a?b#c')
2573 >>> url('http://host/a?b#c')
2570 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2574 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2571 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2575 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2572 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2576 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2573
2577
2574 Empty path:
2578 Empty path:
2575
2579
2576 >>> url('')
2580 >>> url('')
2577 <url path: ''>
2581 <url path: ''>
2578 >>> url('#a')
2582 >>> url('#a')
2579 <url path: '', fragment: 'a'>
2583 <url path: '', fragment: 'a'>
2580 >>> url('http://host/')
2584 >>> url('http://host/')
2581 <url scheme: 'http', host: 'host', path: ''>
2585 <url scheme: 'http', host: 'host', path: ''>
2582 >>> url('http://host/#a')
2586 >>> url('http://host/#a')
2583 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2587 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2584
2588
2585 Only scheme:
2589 Only scheme:
2586
2590
2587 >>> url('http:')
2591 >>> url('http:')
2588 <url scheme: 'http'>
2592 <url scheme: 'http'>
2589 """
2593 """
2590
2594
2591 _safechars = "!~*'()+"
2595 _safechars = "!~*'()+"
2592 _safepchars = "/!~*'()+:\\"
2596 _safepchars = "/!~*'()+:\\"
2593 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2597 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2594
2598
2595 def __init__(self, path, parsequery=True, parsefragment=True):
2599 def __init__(self, path, parsequery=True, parsefragment=True):
2596 # We slowly chomp away at path until we have only the path left
2600 # We slowly chomp away at path until we have only the path left
2597 self.scheme = self.user = self.passwd = self.host = None
2601 self.scheme = self.user = self.passwd = self.host = None
2598 self.port = self.path = self.query = self.fragment = None
2602 self.port = self.path = self.query = self.fragment = None
2599 self._localpath = True
2603 self._localpath = True
2600 self._hostport = ''
2604 self._hostport = ''
2601 self._origpath = path
2605 self._origpath = path
2602
2606
2603 if parsefragment and '#' in path:
2607 if parsefragment and '#' in path:
2604 path, self.fragment = path.split('#', 1)
2608 path, self.fragment = path.split('#', 1)
2605
2609
2606 # special case for Windows drive letters and UNC paths
2610 # special case for Windows drive letters and UNC paths
2607 if hasdriveletter(path) or path.startswith('\\\\'):
2611 if hasdriveletter(path) or path.startswith('\\\\'):
2608 self.path = path
2612 self.path = path
2609 return
2613 return
2610
2614
2611 # For compatibility reasons, we can't handle bundle paths as
2615 # For compatibility reasons, we can't handle bundle paths as
2612 # normal URLS
2616 # normal URLS
2613 if path.startswith('bundle:'):
2617 if path.startswith('bundle:'):
2614 self.scheme = 'bundle'
2618 self.scheme = 'bundle'
2615 path = path[7:]
2619 path = path[7:]
2616 if path.startswith('//'):
2620 if path.startswith('//'):
2617 path = path[2:]
2621 path = path[2:]
2618 self.path = path
2622 self.path = path
2619 return
2623 return
2620
2624
2621 if self._matchscheme(path):
2625 if self._matchscheme(path):
2622 parts = path.split(':', 1)
2626 parts = path.split(':', 1)
2623 if parts[0]:
2627 if parts[0]:
2624 self.scheme, path = parts
2628 self.scheme, path = parts
2625 self._localpath = False
2629 self._localpath = False
2626
2630
2627 if not path:
2631 if not path:
2628 path = None
2632 path = None
2629 if self._localpath:
2633 if self._localpath:
2630 self.path = ''
2634 self.path = ''
2631 return
2635 return
2632 else:
2636 else:
2633 if self._localpath:
2637 if self._localpath:
2634 self.path = path
2638 self.path = path
2635 return
2639 return
2636
2640
2637 if parsequery and '?' in path:
2641 if parsequery and '?' in path:
2638 path, self.query = path.split('?', 1)
2642 path, self.query = path.split('?', 1)
2639 if not path:
2643 if not path:
2640 path = None
2644 path = None
2641 if not self.query:
2645 if not self.query:
2642 self.query = None
2646 self.query = None
2643
2647
2644 # // is required to specify a host/authority
2648 # // is required to specify a host/authority
2645 if path and path.startswith('//'):
2649 if path and path.startswith('//'):
2646 parts = path[2:].split('/', 1)
2650 parts = path[2:].split('/', 1)
2647 if len(parts) > 1:
2651 if len(parts) > 1:
2648 self.host, path = parts
2652 self.host, path = parts
2649 else:
2653 else:
2650 self.host = parts[0]
2654 self.host = parts[0]
2651 path = None
2655 path = None
2652 if not self.host:
2656 if not self.host:
2653 self.host = None
2657 self.host = None
2654 # path of file:///d is /d
2658 # path of file:///d is /d
2655 # path of file:///d:/ is d:/, not /d:/
2659 # path of file:///d:/ is d:/, not /d:/
2656 if path and not hasdriveletter(path):
2660 if path and not hasdriveletter(path):
2657 path = '/' + path
2661 path = '/' + path
2658
2662
2659 if self.host and '@' in self.host:
2663 if self.host and '@' in self.host:
2660 self.user, self.host = self.host.rsplit('@', 1)
2664 self.user, self.host = self.host.rsplit('@', 1)
2661 if ':' in self.user:
2665 if ':' in self.user:
2662 self.user, self.passwd = self.user.split(':', 1)
2666 self.user, self.passwd = self.user.split(':', 1)
2663 if not self.host:
2667 if not self.host:
2664 self.host = None
2668 self.host = None
2665
2669
2666 # Don't split on colons in IPv6 addresses without ports
2670 # Don't split on colons in IPv6 addresses without ports
2667 if (self.host and ':' in self.host and
2671 if (self.host and ':' in self.host and
2668 not (self.host.startswith('[') and self.host.endswith(']'))):
2672 not (self.host.startswith('[') and self.host.endswith(']'))):
2669 self._hostport = self.host
2673 self._hostport = self.host
2670 self.host, self.port = self.host.rsplit(':', 1)
2674 self.host, self.port = self.host.rsplit(':', 1)
2671 if not self.host:
2675 if not self.host:
2672 self.host = None
2676 self.host = None
2673
2677
2674 if (self.host and self.scheme == 'file' and
2678 if (self.host and self.scheme == 'file' and
2675 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2679 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2676 raise Abort(_('file:// URLs can only refer to localhost'))
2680 raise Abort(_('file:// URLs can only refer to localhost'))
2677
2681
2678 self.path = path
2682 self.path = path
2679
2683
2680 # leave the query string escaped
2684 # leave the query string escaped
2681 for a in ('user', 'passwd', 'host', 'port',
2685 for a in ('user', 'passwd', 'host', 'port',
2682 'path', 'fragment'):
2686 'path', 'fragment'):
2683 v = getattr(self, a)
2687 v = getattr(self, a)
2684 if v is not None:
2688 if v is not None:
2685 setattr(self, a, urlreq.unquote(v))
2689 setattr(self, a, urlreq.unquote(v))
2686
2690
2687 def __repr__(self):
2691 def __repr__(self):
2688 attrs = []
2692 attrs = []
2689 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2693 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2690 'query', 'fragment'):
2694 'query', 'fragment'):
2691 v = getattr(self, a)
2695 v = getattr(self, a)
2692 if v is not None:
2696 if v is not None:
2693 attrs.append('%s: %r' % (a, v))
2697 attrs.append('%s: %r' % (a, v))
2694 return '<url %s>' % ', '.join(attrs)
2698 return '<url %s>' % ', '.join(attrs)
2695
2699
2696 def __str__(self):
2700 def __str__(self):
2697 r"""Join the URL's components back into a URL string.
2701 r"""Join the URL's components back into a URL string.
2698
2702
2699 Examples:
2703 Examples:
2700
2704
2701 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2705 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
2702 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2706 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
2703 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2707 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2704 'http://user:pw@host:80/?foo=bar&baz=42'
2708 'http://user:pw@host:80/?foo=bar&baz=42'
2705 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2709 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2706 'http://user:pw@host:80/?foo=bar%3dbaz'
2710 'http://user:pw@host:80/?foo=bar%3dbaz'
2707 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2711 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2708 'ssh://user:pw@[::1]:2200//home/joe#'
2712 'ssh://user:pw@[::1]:2200//home/joe#'
2709 >>> str(url('http://localhost:80//'))
2713 >>> str(url('http://localhost:80//'))
2710 'http://localhost:80//'
2714 'http://localhost:80//'
2711 >>> str(url('http://localhost:80/'))
2715 >>> str(url('http://localhost:80/'))
2712 'http://localhost:80/'
2716 'http://localhost:80/'
2713 >>> str(url('http://localhost:80'))
2717 >>> str(url('http://localhost:80'))
2714 'http://localhost:80/'
2718 'http://localhost:80/'
2715 >>> str(url('bundle:foo'))
2719 >>> str(url('bundle:foo'))
2716 'bundle:foo'
2720 'bundle:foo'
2717 >>> str(url('bundle://../foo'))
2721 >>> str(url('bundle://../foo'))
2718 'bundle:../foo'
2722 'bundle:../foo'
2719 >>> str(url('path'))
2723 >>> str(url('path'))
2720 'path'
2724 'path'
2721 >>> str(url('file:///tmp/foo/bar'))
2725 >>> str(url('file:///tmp/foo/bar'))
2722 'file:///tmp/foo/bar'
2726 'file:///tmp/foo/bar'
2723 >>> str(url('file:///c:/tmp/foo/bar'))
2727 >>> str(url('file:///c:/tmp/foo/bar'))
2724 'file:///c:/tmp/foo/bar'
2728 'file:///c:/tmp/foo/bar'
2725 >>> print url(r'bundle:foo\bar')
2729 >>> print url(r'bundle:foo\bar')
2726 bundle:foo\bar
2730 bundle:foo\bar
2727 >>> print url(r'file:///D:\data\hg')
2731 >>> print url(r'file:///D:\data\hg')
2728 file:///D:\data\hg
2732 file:///D:\data\hg
2729 """
2733 """
2730 return encoding.strfromlocal(self.__bytes__())
2734 return encoding.strfromlocal(self.__bytes__())
2731
2735
2732 def __bytes__(self):
2736 def __bytes__(self):
2733 if self._localpath:
2737 if self._localpath:
2734 s = self.path
2738 s = self.path
2735 if self.scheme == 'bundle':
2739 if self.scheme == 'bundle':
2736 s = 'bundle:' + s
2740 s = 'bundle:' + s
2737 if self.fragment:
2741 if self.fragment:
2738 s += '#' + self.fragment
2742 s += '#' + self.fragment
2739 return s
2743 return s
2740
2744
2741 s = self.scheme + ':'
2745 s = self.scheme + ':'
2742 if self.user or self.passwd or self.host:
2746 if self.user or self.passwd or self.host:
2743 s += '//'
2747 s += '//'
2744 elif self.scheme and (not self.path or self.path.startswith('/')
2748 elif self.scheme and (not self.path or self.path.startswith('/')
2745 or hasdriveletter(self.path)):
2749 or hasdriveletter(self.path)):
2746 s += '//'
2750 s += '//'
2747 if hasdriveletter(self.path):
2751 if hasdriveletter(self.path):
2748 s += '/'
2752 s += '/'
2749 if self.user:
2753 if self.user:
2750 s += urlreq.quote(self.user, safe=self._safechars)
2754 s += urlreq.quote(self.user, safe=self._safechars)
2751 if self.passwd:
2755 if self.passwd:
2752 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2756 s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
2753 if self.user or self.passwd:
2757 if self.user or self.passwd:
2754 s += '@'
2758 s += '@'
2755 if self.host:
2759 if self.host:
2756 if not (self.host.startswith('[') and self.host.endswith(']')):
2760 if not (self.host.startswith('[') and self.host.endswith(']')):
2757 s += urlreq.quote(self.host)
2761 s += urlreq.quote(self.host)
2758 else:
2762 else:
2759 s += self.host
2763 s += self.host
2760 if self.port:
2764 if self.port:
2761 s += ':' + urlreq.quote(self.port)
2765 s += ':' + urlreq.quote(self.port)
2762 if self.host:
2766 if self.host:
2763 s += '/'
2767 s += '/'
2764 if self.path:
2768 if self.path:
2765 # TODO: similar to the query string, we should not unescape the
2769 # TODO: similar to the query string, we should not unescape the
2766 # path when we store it, the path might contain '%2f' = '/',
2770 # path when we store it, the path might contain '%2f' = '/',
2767 # which we should *not* escape.
2771 # which we should *not* escape.
2768 s += urlreq.quote(self.path, safe=self._safepchars)
2772 s += urlreq.quote(self.path, safe=self._safepchars)
2769 if self.query:
2773 if self.query:
2770 # we store the query in escaped form.
2774 # we store the query in escaped form.
2771 s += '?' + self.query
2775 s += '?' + self.query
2772 if self.fragment is not None:
2776 if self.fragment is not None:
2773 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2777 s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
2774 return s
2778 return s
2775
2779
2776 def authinfo(self):
2780 def authinfo(self):
2777 user, passwd = self.user, self.passwd
2781 user, passwd = self.user, self.passwd
2778 try:
2782 try:
2779 self.user, self.passwd = None, None
2783 self.user, self.passwd = None, None
2780 s = str(self)
2784 s = str(self)
2781 finally:
2785 finally:
2782 self.user, self.passwd = user, passwd
2786 self.user, self.passwd = user, passwd
2783 if not self.user:
2787 if not self.user:
2784 return (s, None)
2788 return (s, None)
2785 # authinfo[1] is passed to urllib2 password manager, and its
2789 # authinfo[1] is passed to urllib2 password manager, and its
2786 # URIs must not contain credentials. The host is passed in the
2790 # URIs must not contain credentials. The host is passed in the
2787 # URIs list because Python < 2.4.3 uses only that to search for
2791 # URIs list because Python < 2.4.3 uses only that to search for
2788 # a password.
2792 # a password.
2789 return (s, (None, (s, self.host),
2793 return (s, (None, (s, self.host),
2790 self.user, self.passwd or ''))
2794 self.user, self.passwd or ''))
2791
2795
2792 def isabs(self):
2796 def isabs(self):
2793 if self.scheme and self.scheme != 'file':
2797 if self.scheme and self.scheme != 'file':
2794 return True # remote URL
2798 return True # remote URL
2795 if hasdriveletter(self.path):
2799 if hasdriveletter(self.path):
2796 return True # absolute for our purposes - can't be joined()
2800 return True # absolute for our purposes - can't be joined()
2797 if self.path.startswith(r'\\'):
2801 if self.path.startswith(r'\\'):
2798 return True # Windows UNC path
2802 return True # Windows UNC path
2799 if self.path.startswith('/'):
2803 if self.path.startswith('/'):
2800 return True # POSIX-style
2804 return True # POSIX-style
2801 return False
2805 return False
2802
2806
2803 def localpath(self):
2807 def localpath(self):
2804 if self.scheme == 'file' or self.scheme == 'bundle':
2808 if self.scheme == 'file' or self.scheme == 'bundle':
2805 path = self.path or '/'
2809 path = self.path or '/'
2806 # For Windows, we need to promote hosts containing drive
2810 # For Windows, we need to promote hosts containing drive
2807 # letters to paths with drive letters.
2811 # letters to paths with drive letters.
2808 if hasdriveletter(self._hostport):
2812 if hasdriveletter(self._hostport):
2809 path = self._hostport + '/' + self.path
2813 path = self._hostport + '/' + self.path
2810 elif (self.host is not None and self.path
2814 elif (self.host is not None and self.path
2811 and not hasdriveletter(path)):
2815 and not hasdriveletter(path)):
2812 path = '/' + path
2816 path = '/' + path
2813 return path
2817 return path
2814 return self._origpath
2818 return self._origpath
2815
2819
2816 def islocal(self):
2820 def islocal(self):
2817 '''whether localpath will return something that posixfile can open'''
2821 '''whether localpath will return something that posixfile can open'''
2818 return (not self.scheme or self.scheme == 'file'
2822 return (not self.scheme or self.scheme == 'file'
2819 or self.scheme == 'bundle')
2823 or self.scheme == 'bundle')
2820
2824
def hasscheme(path):
    """Return True if `path` parses with a URL scheme component."""
    u = url(path)
    return bool(u.scheme)
2823
2827
def hasdriveletter(path):
    """Check whether `path` starts with a Windows drive letter ('c:...').

    Note: for an empty/None `path` this falls through with that falsy
    value rather than False, matching truthiness-based callers.
    """
    if not path:
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
2826
2830
def urllocalpath(path):
    """Return the local filesystem path for `path`, parsed as a URL with
    query and fragment parsing disabled."""
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
2829
2833
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return str(parsed)
2836
2840
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = parsed.passwd = None
    return str(parsed)
2842
2846
# Human-readable formatting of a duration in seconds. The tuples are
# presumably (threshold, divisor, format) consumed in order by
# unitcountfn, first match winning — confirm against unitcountfn; do
# not reorder.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
)
2858
2862
# current indentation level for nested @timed reports; a one-element
# list so the decorator's wrapper can mutate it in place
_timenesting = [0]
2860
2864
def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        begin = timer()
        step = 2
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            # report even when the wrapped call raises
            duration = timer() - begin
            _timenesting[0] -= step
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(duration)))
    return wrapper
2885
2889
# suffix -> byte multiplier; scanned in order by sizetoint's endswith
# loop, so the bare 'b' entry must stay last or it would shadow
# 'kb'/'mb'/'gb'
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2888
2892
def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * multiplier)
        # no recognized suffix: plain integer byte count
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2907
2911
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, hookfn) pairs
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort lazily at call time so registration order never matters
        self._hooks.sort(key=lambda entry: entry[0])
        return [hookfn(*args) for _source, hookfn in self._hooks]
2925
2929
2926 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
2930 def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
2927 '''Yields lines for a nicely formatted stacktrace.
2931 '''Yields lines for a nicely formatted stacktrace.
2928 Skips the 'skip' last entries, then return the last 'depth' entries.
2932 Skips the 'skip' last entries, then return the last 'depth' entries.
2929 Each file+linenumber is formatted according to fileline.
2933 Each file+linenumber is formatted according to fileline.
2930 Each line is formatted according to line.
2934 Each line is formatted according to line.
2931 If line is None, it yields:
2935 If line is None, it yields:
2932 length of longest filepath+line number,
2936 length of longest filepath+line number,
2933 filepath+linenumber,
2937 filepath+linenumber,
2934 function
2938 function
2935
2939
2936 Not be used in production code but very convenient while developing.
2940 Not be used in production code but very convenient while developing.
2937 '''
2941 '''
2938 entries = [(fileline % (fn, ln), func)
2942 entries = [(fileline % (fn, ln), func)
2939 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
2943 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
2940 ][-depth:]
2944 ][-depth:]
2941 if entries:
2945 if entries:
2942 fnmax = max(len(entry[0]) for entry in entries)
2946 fnmax = max(len(entry[0]) for entry in entries)
2943 for fnln, func in entries:
2947 for fnln, func in entries:
2944 if line is None:
2948 if line is None:
2945 yield (fnmax, fnln, func)
2949 yield (fnmax, fnln, func)
2946 else:
2950 else:
2947 yield line % (fnmax, fnln, func)
2951 yield line % (fnmax, fnln, func)
2948
2952
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so output interleaves sensibly
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    # skip + 1 hides this helper's own frame
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
2963
2967
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: entries whose first state byte
            # equals `skip` are excluded from the count
            for fname, entry in map.iteritems():
                if entry[0] != skip:
                    addpath(fname)
        else:
            for fname in map:
                addpath(fname)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # ancestors of an already-counted directory were counted
                # with it; bump just this one and stop
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
2999
3003
# replace the pure-Python implementation with the one from the parsers
# module when it provides a 'dirs' — presumably a faster native
# implementation; confirm against parsers
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3002
3006
def finddirs(path):
    """Yield every ancestor directory of a '/'-separated path, deepest
    first ('a/b/c' -> 'a/b', 'a'); yields nothing for a bare name."""
    cut = path.rfind('/')
    while cut != -1:
        yield path[:cut]
        cut = path.rfind('/', 0, cut)
3008
3012
class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    # an exit function handled the exception: stop
                    # propagating it to the remaining exit functions
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                # remember the most recent exit-function failure; it is
                # re-raised after every exit function has run. (Fixed: a
                # redundant 'pending = sys.exc_info()' dead store that
                # the next line immediately overwrote was removed.)
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
3068
3072
# compression code

SERVERROLE = 'server'
CLIENTROLE = 'client'

# describes an engine's wire protocol support: its wire name and its
# priority when acting as server or client
compewireprotosupport = collections.namedtuple(
    u'compenginewireprotosupport',
    (u'name', u'serverpriority', u'clientpriority'))
3077
3081
class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        # engine name -> engine instance
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}
3099 def __getitem__(self, key):
3103 def __getitem__(self, key):
3100 return self._engines[key]
3104 return self._engines[key]
3101
3105
3102 def __contains__(self, key):
3106 def __contains__(self, key):
3103 return key in self._engines
3107 return key in self._engines
3104
3108
3105 def __iter__(self):
3109 def __iter__(self):
3106 return iter(self._engines.keys())
3110 return iter(self._engines.keys())
3107
3111
3108 def register(self, engine):
3112 def register(self, engine):
3109 """Register a compression engine with the manager.
3113 """Register a compression engine with the manager.
3110
3114
3111 The argument must be a ``compressionengine`` instance.
3115 The argument must be a ``compressionengine`` instance.
3112 """
3116 """
3113 if not isinstance(engine, compressionengine):
3117 if not isinstance(engine, compressionengine):
3114 raise ValueError(_('argument must be a compressionengine'))
3118 raise ValueError(_('argument must be a compressionengine'))
3115
3119
3116 name = engine.name()
3120 name = engine.name()
3117
3121
3118 if name in self._engines:
3122 if name in self._engines:
3119 raise error.Abort(_('compression engine %s already registered') %
3123 raise error.Abort(_('compression engine %s already registered') %
3120 name)
3124 name)
3121
3125
3122 bundleinfo = engine.bundletype()
3126 bundleinfo = engine.bundletype()
3123 if bundleinfo:
3127 if bundleinfo:
3124 bundlename, bundletype = bundleinfo
3128 bundlename, bundletype = bundleinfo
3125
3129
3126 if bundlename in self._bundlenames:
3130 if bundlename in self._bundlenames:
3127 raise error.Abort(_('bundle name %s already registered') %
3131 raise error.Abort(_('bundle name %s already registered') %
3128 bundlename)
3132 bundlename)
3129 if bundletype in self._bundletypes:
3133 if bundletype in self._bundletypes:
3130 raise error.Abort(_('bundle type %s already registered by %s') %
3134 raise error.Abort(_('bundle type %s already registered by %s') %
3131 (bundletype, self._bundletypes[bundletype]))
3135 (bundletype, self._bundletypes[bundletype]))
3132
3136
3133 # No external facing name declared.
3137 # No external facing name declared.
3134 if bundlename:
3138 if bundlename:
3135 self._bundlenames[bundlename] = name
3139 self._bundlenames[bundlename] = name
3136
3140
3137 self._bundletypes[bundletype] = name
3141 self._bundletypes[bundletype] = name
3138
3142
3139 wiresupport = engine.wireprotosupport()
3143 wiresupport = engine.wireprotosupport()
3140 if wiresupport:
3144 if wiresupport:
3141 wiretype = wiresupport.name
3145 wiretype = wiresupport.name
3142 if wiretype in self._wiretypes:
3146 if wiretype in self._wiretypes:
3143 raise error.Abort(_('wire protocol compression %s already '
3147 raise error.Abort(_('wire protocol compression %s already '
3144 'registered by %s') %
3148 'registered by %s') %
3145 (wiretype, self._wiretypes[wiretype]))
3149 (wiretype, self._wiretypes[wiretype]))
3146
3150
3147 self._wiretypes[wiretype] = name
3151 self._wiretypes[wiretype] = name
3148
3152
3149 revlogheader = engine.revlogheader()
3153 revlogheader = engine.revlogheader()
3150 if revlogheader and revlogheader in self._revlogheaders:
3154 if revlogheader and revlogheader in self._revlogheaders:
3151 raise error.Abort(_('revlog header %s already registered by %s') %
3155 raise error.Abort(_('revlog header %s already registered by %s') %
3152 (revlogheader, self._revlogheaders[revlogheader]))
3156 (revlogheader, self._revlogheaders[revlogheader]))
3153
3157
3154 if revlogheader:
3158 if revlogheader:
3155 self._revlogheaders[revlogheader] = name
3159 self._revlogheaders[revlogheader] = name
3156
3160
3157 self._engines[name] = engine
3161 self._engines[name] = engine
3158
3162
3159 @property
3163 @property
3160 def supportedbundlenames(self):
3164 def supportedbundlenames(self):
3161 return set(self._bundlenames.keys())
3165 return set(self._bundlenames.keys())
3162
3166
3163 @property
3167 @property
3164 def supportedbundletypes(self):
3168 def supportedbundletypes(self):
3165 return set(self._bundletypes.keys())
3169 return set(self._bundletypes.keys())
3166
3170
3167 def forbundlename(self, bundlename):
3171 def forbundlename(self, bundlename):
3168 """Obtain a compression engine registered to a bundle name.
3172 """Obtain a compression engine registered to a bundle name.
3169
3173
3170 Will raise KeyError if the bundle type isn't registered.
3174 Will raise KeyError if the bundle type isn't registered.
3171
3175
3172 Will abort if the engine is known but not available.
3176 Will abort if the engine is known but not available.
3173 """
3177 """
3174 engine = self._engines[self._bundlenames[bundlename]]
3178 engine = self._engines[self._bundlenames[bundlename]]
3175 if not engine.available():
3179 if not engine.available():
3176 raise error.Abort(_('compression engine %s could not be loaded') %
3180 raise error.Abort(_('compression engine %s could not be loaded') %
3177 engine.name())
3181 engine.name())
3178 return engine
3182 return engine
3179
3183
3180 def forbundletype(self, bundletype):
3184 def forbundletype(self, bundletype):
3181 """Obtain a compression engine registered to a bundle type.
3185 """Obtain a compression engine registered to a bundle type.
3182
3186
3183 Will raise KeyError if the bundle type isn't registered.
3187 Will raise KeyError if the bundle type isn't registered.
3184
3188
3185 Will abort if the engine is known but not available.
3189 Will abort if the engine is known but not available.
3186 """
3190 """
3187 engine = self._engines[self._bundletypes[bundletype]]
3191 engine = self._engines[self._bundletypes[bundletype]]
3188 if not engine.available():
3192 if not engine.available():
3189 raise error.Abort(_('compression engine %s could not be loaded') %
3193 raise error.Abort(_('compression engine %s could not be loaded') %
3190 engine.name())
3194 engine.name())
3191 return engine
3195 return engine
3192
3196
3193 def supportedwireengines(self, role, onlyavailable=True):
3197 def supportedwireengines(self, role, onlyavailable=True):
3194 """Obtain compression engines that support the wire protocol.
3198 """Obtain compression engines that support the wire protocol.
3195
3199
3196 Returns a list of engines in prioritized order, most desired first.
3200 Returns a list of engines in prioritized order, most desired first.
3197
3201
3198 If ``onlyavailable`` is set, filter out engines that can't be
3202 If ``onlyavailable`` is set, filter out engines that can't be
3199 loaded.
3203 loaded.
3200 """
3204 """
3201 assert role in (SERVERROLE, CLIENTROLE)
3205 assert role in (SERVERROLE, CLIENTROLE)
3202
3206
3203 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3207 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3204
3208
3205 engines = [self._engines[e] for e in self._wiretypes.values()]
3209 engines = [self._engines[e] for e in self._wiretypes.values()]
3206 if onlyavailable:
3210 if onlyavailable:
3207 engines = [e for e in engines if e.available()]
3211 engines = [e for e in engines if e.available()]
3208
3212
3209 def getkey(e):
3213 def getkey(e):
3210 # Sort first by priority, highest first. In case of tie, sort
3214 # Sort first by priority, highest first. In case of tie, sort
3211 # alphabetically. This is arbitrary, but ensures output is
3215 # alphabetically. This is arbitrary, but ensures output is
3212 # stable.
3216 # stable.
3213 w = e.wireprotosupport()
3217 w = e.wireprotosupport()
3214 return -1 * getattr(w, attr), w.name
3218 return -1 * getattr(w, attr), w.name
3215
3219
3216 return list(sorted(engines, key=getkey))
3220 return list(sorted(engines, key=getkey))
3217
3221
    def forwiretype(self, wiretype):
        """Obtain a compression engine registered to a wire protocol format.

        Will raise KeyError if the wire format isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine
3224
3228
3225 def forrevlogheader(self, header):
3229 def forrevlogheader(self, header):
3226 """Obtain a compression engine registered to a revlog header.
3230 """Obtain a compression engine registered to a revlog header.
3227
3231
3228 Will raise KeyError if the revlog header value isn't registered.
3232 Will raise KeyError if the revlog header value isn't registered.
3229 """
3233 """
3230 return self._engines[self._revlogheaders[header]]
3234 return self._engines[self._revlogheaders[header]]
3231
3235
# Module-level singleton registry; engines defined below register
# themselves against it at import time.
compengines = compressormanager()
3233
3237
class compressionengine(object):
    """Abstract base class defining the compression engine interface.

    Concrete engines subclass this and override the subset of methods
    relevant to the features they support (bundles, wire protocol,
    revlogs).
    """
    def name(self):
        """Return the name this engine is registered under.

        This is the key used by the registry. Must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Report whether this engine can be used in this installation.

        Exists so optional engines (e.g. ones relying on C extensions that
        may be absent) can be registered unconditionally.
        """
        return True

    def bundletype(self):
        """Describe bundle identifiers for this engine.

        Returns None when the engine cannot compress bundles. Otherwise
        returns a 2-tuple of strings: the user-facing "bundle spec"
        compression name and the internal identifier used to denote this
        compression format within bundles. Set the first element to
        ``None`` to exclude the name from external usage.

        Engines supporting bundles must also implement ``compressstream``
        and ``decompressorreader``.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        Returns None when unsupported. Otherwise returns a
        ``compenginewireprotosupport`` carrying the string format
        identifier and integer priorities for the server and client.

        Priorities order the advertisement of format support: the highest
        integer is advertised first and non-positive values are not
        advertised at all. The values are somewhat arbitrary defaults; the
        relative order can be changed via config options.

        Engines supporting the wire protocol must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Return the bytes identifying this engine's chunks in revlogs.

        Returns ``None`` when the engine does not participate in revlog
        compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of byte chunks.

        Receives an iterator (ideally a generator) of chunks of bytes and
        returns an iterator (ideally a generator) of compressed chunks.
        ``opts`` optionally defines how to perform compression; each
        engine treats it differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Wrap a file object to perform decompression.

        ``fh`` is an object with a ``read(size)`` method returning
        compressed data; the return value is an object with a
        ``read(size)`` method returning uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can compress/decompress revlog entries.

        The returned object has a ``compress(data)`` method returning
        compressed binary data — with a header matching
        ``revlogheader()`` so decompression can be routed back here — or
        ``None`` when the data could not be compressed (too small, not
        compressible, etc). Its ``decompress(data)`` method is only called
        when ``data`` begins with ``revlogheader()`` and either returns
        the raw, uncompressed data or raises a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3347
3351
class _zlibengine(compressionengine):
    """Compression engine backed by the stdlib zlib module.

    Exposed as the 'gzip' bundle name / 'GZ' bundle type and the 'zlib'
    wire protocol format. Always available since zlib ships with Python.
    """
    def name(self):
        return 'zlib'

    def bundletype(self):
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        # Middle-of-the-road priority (20/20) on both server and client.
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        # NOTE(review): 'x' appears to match the leading byte of a zlib
        # stream with default settings, letting legacy chunks be
        # recognized without an extra marker — confirm.
        return 'x'

    def compressstream(self, it, opts=None):
        # Yield zlib-compressed chunks for the chunks in ``it``.
        # ``opts['level']`` selects the compression level; -1 means
        # zlib's own default.
        opts = opts or {}

        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        # Wrap ``fh`` in a chunkbuffer that lazily decompresses its
        # contents.
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                # Drain each input chunk fully; unconsumed_tail holds
                # whatever decompress() did not process this round.
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        # Per-entry compressor implementing the revlogcompressor contract
        # (compress() returns None when compression isn't worthwhile).
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # Below 44 bytes, give up immediately — presumably an
            # empirical cutoff under which zlib never wins; confirm.
            if insize < 44:
                return None

            elif insize <= 1000000:
                # Small enough to compress in one shot; only return the
                # result if it actually saved space.
                compressed = zlib.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            else:
                z = zlib.compressobj()
                parts = []
                pos = 0
                while pos < insize:
                    # Feed 1 MB (2**20) slices at a time.
                    pos2 = pos + 2**20
                    parts.append(z.compress(data[pos:pos2]))
                    pos = pos2
                parts.append(z.flush())

                if sum(map(len, parts)) < insize:
                    return ''.join(parts)
                return None

        def decompress(self, data):
            # Translate any zlib failure into RevlogError per the
            # revlogcompressor contract.
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        # ``opts`` is accepted for interface symmetry but unused here.
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3428
3432
class _bz2engine(compressionengine):
    """Compression engine backed by the stdlib bz2 module.

    Exposed as the 'bzip2' bundle name / 'BZ' bundle type.
    """
    def name(self):
        return 'bz2'

    def bundletype(self):
        return 'bzip2', 'BZ'

    # We declare a protocol name but don't advertise by default because
    # it is slow.
    def wireprotosupport(self):
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        # Yield bz2-compressed chunks; opts['level'] defaults to 9.
        opts = opts or {}
        compressor = bz2.BZ2Compressor(opts.get('level', 9))
        for piece in it:
            out = compressor.compress(piece)
            # compress() may buffer internally and emit nothing yet.
            if out:
                yield out

        yield compressor.flush()

    def decompressorreader(self, fh):
        # Wrap ``fh`` so reads return decompressed data.
        def chunks():
            decompressor = bz2.BZ2Decompressor()
            for piece in filechunkiter(fh):
                yield decompressor.decompress(piece)

        return chunkbuffer(chunks())

compengines.register(_bz2engine())
3460
3464
class _truncatedbz2engine(compressionengine):
    """Decompression-only engine for bz2 streams missing the 'BZ' header.

    Not exposed as a user-facing bundle name (first bundletype element is
    None); used internally for the legacy '_truncatedBZ' format.
    """
    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def chunks():
            decompressor = bz2.BZ2Decompressor()
            # The input stream doesn't have the 'BZ' header. So add it back.
            decompressor.decompress('BZ')
            for piece in filechunkiter(fh):
                yield decompressor.decompress(piece)

        return chunkbuffer(chunks())

compengines.register(_truncatedbz2engine())
3481
3485
class _noopengine(compressionengine):
    """Pass-through engine: performs no compression at all.

    Exposed as the 'none' bundle name / 'UN' bundle type.
    """
    def name(self):
        return 'none'

    def bundletype(self):
        return 'none', 'UN'

    # Clients always support uncompressed payloads. Servers don't because
    # unless you are on a fast network, uncompressed payloads can easily
    # saturate your network pipe.
    def wireprotosupport(self):
        return compewireprotosupport('none', 0, 10)

    # We don't implement revlogheader because it is handled specially
    # in the revlog class.

    def compressstream(self, it, opts=None):
        # Identity transform: the input iterator is returned unchanged.
        return it

    def decompressorreader(self, fh):
        # Identity transform: the input file object is returned unchanged.
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            # Never claims to have compressed anything; callers store the
            # data uncompressed.
            return None

    def revlogcompressor(self, opts=None):
        # ``opts`` is accepted for interface symmetry but unused here.
        return self.nooprevlogcompressor()

compengines.register(_noopengine())
3512
3516
class _zstdengine(compressionengine):
    """Compression engine backed by the optional bundled zstd module.

    Exposed as the 'zstd' bundle name / 'ZS' bundle type and the 'zstd'
    wire protocol format. Only available when the zstd C extension is
    importable.
    """
    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # Not all installs have the zstd module available. So defer importing
        # until first access.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            return None

    def available(self):
        # Available only when the deferred import above succeeded.
        return bool(self._module)

    def bundletype(self):
        return 'zstd', 'ZS'

    def wireprotosupport(self):
        # Highest default priority (50/50): preferred over zlib when both
        # sides support it.
        return compewireprotosupport('zstd', 50, 50)

    def revlogheader(self):
        return '\x28'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression. It strikes a good balance
        # between speed and compression.
        level = opts.get('level', 3)

        zstd = self._module
        z = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            data = z.compress(chunk)
            # compress() may buffer internally and emit nothing yet.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        # Wrap ``fh`` so reads return decompressed data.
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

    class zstdrevlogcompressor(object):
        # Per-entry compressor implementing the revlogcompressor contract.
        def __init__(self, zstd, level=3):
            # Writing the content size adds a few bytes to the output. However,
            # it allows decompression to be more optimal since we can
            # pre-allocate a buffer to hold the result.
            self._cctx = zstd.ZstdCompressor(level=level,
                                             write_content_size=True)
            self._dctx = zstd.ZstdDecompressor()
            # Chunk sizes recommended by the zstd bindings for streaming.
            self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
            self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE

        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            # Below 50 bytes, give up immediately — presumably an
            # empirical cutoff under which zstd never wins; confirm.
            if insize < 50:
                return None

            elif insize <= 1000000:
                # Small enough to compress in one shot; only return the
                # result if it actually saved space.
                compressed = self._cctx.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None
            else:
                # Stream large inputs in recommended-size slices to bound
                # memory usage.
                z = self._cctx.compressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._compinsize
                    chunk = z.compress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                chunks.append(z.flush())

                if sum(map(len, chunks)) < insize:
                    return ''.join(chunks)
                return None

        def decompress(self, data):
            insize = len(data)

            try:
                # This was measured to be faster than other streaming
                # decompressors.
                dobj = self._dctx.decompressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._decompinsize
                    chunk = dobj.decompress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                # Frame should be exhausted, so no finish() API.

                return ''.join(chunks)
            except Exception as e:
                # Translate any failure into RevlogError per the
                # revlogcompressor contract.
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        opts = opts or {}
        return self.zstdrevlogcompressor(self._module,
                                         level=opts.get('level', 3))

compengines.register(_zstdengine())
3630
3634
# convenient shortcut: 'util.dst()' is a short alias for
# debugstacktrace, handy to sprinkle into code while debugging
dst = debugstacktrace
General Comments 0
You need to be logged in to leave comments. Login now