##// END OF EJS Templates
util: add a way to issue deprecation warning without a UI object...
Pierre-Yves David -
r31950:cc70c6db default
parent child Browse files
Show More
@@ -1,3720 +1,3746 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from __future__ import absolute_import
16 from __future__ import absolute_import
17
17
18 import bz2
18 import bz2
19 import calendar
19 import calendar
20 import codecs
20 import codecs
21 import collections
21 import collections
22 import datetime
22 import datetime
23 import errno
23 import errno
24 import gc
24 import gc
25 import hashlib
25 import hashlib
26 import imp
26 import imp
27 import os
27 import os
28 import platform as pyplatform
28 import platform as pyplatform
29 import re as remod
29 import re as remod
30 import shutil
30 import shutil
31 import signal
31 import signal
32 import socket
32 import socket
33 import stat
33 import stat
34 import string
34 import string
35 import subprocess
35 import subprocess
36 import sys
36 import sys
37 import tempfile
37 import tempfile
38 import textwrap
38 import textwrap
39 import time
39 import time
40 import traceback
40 import traceback
41 import warnings
41 import zlib
42 import zlib
42
43
43 from . import (
44 from . import (
44 encoding,
45 encoding,
45 error,
46 error,
46 i18n,
47 i18n,
47 osutil,
48 osutil,
48 parsers,
49 parsers,
49 pycompat,
50 pycompat,
50 )
51 )
51
52
52 cookielib = pycompat.cookielib
53 cookielib = pycompat.cookielib
53 empty = pycompat.empty
54 empty = pycompat.empty
54 httplib = pycompat.httplib
55 httplib = pycompat.httplib
55 httpserver = pycompat.httpserver
56 httpserver = pycompat.httpserver
56 pickle = pycompat.pickle
57 pickle = pycompat.pickle
57 queue = pycompat.queue
58 queue = pycompat.queue
58 socketserver = pycompat.socketserver
59 socketserver = pycompat.socketserver
59 stderr = pycompat.stderr
60 stderr = pycompat.stderr
60 stdin = pycompat.stdin
61 stdin = pycompat.stdin
61 stdout = pycompat.stdout
62 stdout = pycompat.stdout
62 stringio = pycompat.stringio
63 stringio = pycompat.stringio
63 urlerr = pycompat.urlerr
64 urlerr = pycompat.urlerr
64 urlreq = pycompat.urlreq
65 urlreq = pycompat.urlreq
65 xmlrpclib = pycompat.xmlrpclib
66 xmlrpclib = pycompat.xmlrpclib
66
67
def isatty(fp):
    """Report whether fp is attached to a terminal.

    Objects without an isatty() method are treated as non-ttys.
    """
    try:
        result = fp.isatty()
    except AttributeError:
        result = False
    return result
72
73
# glibc determines buffering on first write to stdout - if we replace a TTY
# destined stdout with a pipe destined stdout (e.g. pager), we want line
# buffering
if isatty(stdout):
    # reopen stdout line-buffered (bufsize=1) before anything is written
    stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)

# Select the platform-specific implementation module; the aliases further
# down re-export its functions under OS-independent names.
if pycompat.osname == 'nt':
    from . import windows as platform
    # NOTE(review): winstdout presumably wraps stdout to work around
    # Windows console write limitations -- see windows.winstdout
    stdout = platform.winstdout(stdout)
else:
    from . import posix as platform
84
85
_ = i18n._

# Re-export the platform-specific implementations under stable names so the
# rest of the codebase can call util.<name> regardless of operating system.
bindunixsocket = platform.bindunixsocket
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
getpid = os.getpid
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
readpipe = platform.readpipe
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
split = platform.split
sshargs = platform.sshargs
# prefer the C implementation from osutil when it is available
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
username = platform.username

# Python compatibility

# unique sentinel used to detect "no value" where None is a valid value
_notset = object()

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)
149
150
def safehasattr(thing, attr):
    """Variant of hasattr() that only treats a missing attribute as absent.

    Relies on getattr()'s default, which swallows AttributeError only,
    rather than hasattr()'s broader exception handling on Python 2.
    """
    sentinel = object()
    return getattr(thing, attr, sentinel) is not sentinel
152
153
def bitsfrom(container):
    """Return the bitwise OR of all values in container (0 when empty)."""
    combined = 0
    for flag in container:
        combined = combined | flag
    return combined
158
159
# python 2.6 still have deprecation warning enabled by default. We do not want
# to display anything to standard user so detect if we are running test and
# only use python deprecation warning in this case.
#
# _dowarn also gates nouideprecwarn() below.
_dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
if _dowarn:
    # explicitly unfilter our warning for python 2.7
    #
    # The option of setting PYTHONWARNINGS in the test runner was investigated.
    # However, module name set through PYTHONWARNINGS was exactly matched, so
    # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
    # makes the whole PYTHONWARNINGS thing useless for our usecase.
    warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
174
def nouideprecwarn(msg, version, stacklevel=1):
    """Issue a native Python DeprecationWarning.

    This is a noop outside of tests (when HGEMITWARNINGS is unset); prefer
    'ui.deprecwarn' whenever a ui object is available.
    """
    if not _dowarn:
        return
    hint = ("\n(compatibility will be dropped after Mercurial-%s,"
            " update your code.)") % version
    # +1 so the warning points at our caller, not at this helper
    warnings.warn(msg + hint, DeprecationWarning, stacklevel + 1)
184
# digest name -> hashlib constructor
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every advertised digest has a constructor
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS

class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        # digests: iterable of names from DIGESTS; s: optional initial data
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """feed data to every tracked hash object"""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        """return the hex digest computed so far for digest type 'key'"""
        if key not in DIGESTS:
            # BUGFIX: this used to interpolate the leftover module-level loop
            # variable 'k' (from the assertion loop above) instead of 'key'
            raise Abort(_('unknown digest type: %s') % key)
        # NOTE(review): a name valid in DIGESTS but not tracked by this
        # instance still raises KeyError here -- confirm callers expect that
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
216
242
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        # read from the wrapped handle while feeding the digester and
        # tracking how many bytes have been seen so far
        data = self._fh.read(length)
        self._digester.update(data)
        self._got += len(data)
        return data

    def validate(self):
        # verify total size first, then every expected digest
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
248
274
# Python 3 removed the buffer() builtin; provide a replacement.  On Python 3
# a memoryview gives a zero-copy slice; the plain-slicing fallback (which
# copies) is kept for any Python 2 build lacking buffer().
try:
    buffer = buffer
except NameError:
    if not pycompat.ispy3:
        def buffer(sliceable, offset=0, length=None):
            # copying slice-based fallback
            if length is not None:
                return sliceable[offset:offset + length]
            return sliceable[offset:]
    else:
        def buffer(sliceable, offset=0, length=None):
            # zero-copy view on Python 3
            if length is not None:
                return memoryview(sliceable)[offset:offset + length]
            return memoryview(sliceable)[offset:]

# whether subprocess pipes should close inherited fds (POSIX only)
closefds = pycompat.osname == 'posix'

# read granularity for bufferedinputpipe._fillbuffer
_chunksize = 4096
266
292
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        # input: underlying file-like object; must expose fileno()
        self._input = input
        # list of chunks read from the pipe, oldest first
        self._buffer = []
        # set once os.read() returns no data (end of stream)
        self._eof = False
        # total number of buffered bytes across all chunks
        self._lenbuf = 0

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        """return up to 'size' bytes, reading from the pipe as needed"""
        # keep filling until enough data is buffered or the stream ends
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        """return one line (including '\\n'), or the remainder at EOF"""
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first newline in the most recent chunk (-1: none)
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            # no newline buffered yet: read more data (or hit EOF)
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all chunks into a single string before slicing
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        # keep any leftover as the single remaining chunk
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # raw os.read() (not the file object's read) so polling on the fd
        # stays consistent with our own buffer state
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
360
386
def popen2(cmd, env=None, newlines=False):
    """Spawn cmd through the shell; return its (stdin, stdout) pipes."""
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
371
397
def popen3(cmd, env=None, newlines=False):
    """Like popen4(), but drop the process object from the return value."""
    pipes = popen4(cmd, env, newlines)
    return pipes[0], pipes[1], pipes[2]
375
401
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Spawn cmd through the shell with stdin/stdout/stderr pipes.

    Returns (stdin, stdout, stderr, process).
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
384
410
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
392
418
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = '3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = '3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # split "X.Y.Z" from the "+build" / "-rc" suffix at the first '+' or '-'
    # (raw string: the non-raw '[\+-]' relied on Python keeping the unknown
    # escape '\+', which is a DeprecationWarning on modern Python 3)
    parts = remod.split(r'[\+-]', v, 1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # stop at the first non-numeric component (e.g. "rc")
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
    # any other n falls through and returns None (historical behavior)
461
487
# used by parsedate
defaultdateformats = (
    '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
    '%Y-%m-%dT%H:%M',    # without seconds
    '%Y-%m-%dT%H%M%S',   # another awful but legal variant without :
    '%Y-%m-%dT%H%M',     # without seconds
    '%Y-%m-%d %H:%M:%S', # our common legal variant
    '%Y-%m-%d %H:%M',    # without seconds
    '%Y-%m-%d %H%M%S',   # without :
    '%Y-%m-%d %H%M',     # without seconds
    '%Y-%m-%d %I:%M:%S%p',
    # (a second '%Y-%m-%d %H:%M' used to appear here; it duplicated the
    # entry above and could never match anything the first did not)
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

# formats only acceptable when the user supplies an explicit date range
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
)
503
529
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    memo = {}
    if argcount == 1:
        # single-argument fast path: skip tuple packing/unpacking
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
529
555
class sortdict(dict):
    '''a simple sorted dictionary

    Iteration follows insertion order; assigning to an existing key moves
    it to the end of that order.
    '''
    def __init__(self, data=None):
        # _list mirrors the keys in insertion order
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        # re-inserting an existing key moves it to the end
        if key in self:
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        # NOTE(review): plain dicts are iterated via iteritems(), which
        # assumes Python 2 (or another sortdict) -- confirm for py3 porting
        if isinstance(src, dict):
            src = src.iteritems()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        try:
            self._list.remove(key)
        except ValueError:
            # key absent; let dict.pop return the default or raise KeyError
            pass
        return dict.pop(self, key, *args, **kwargs)
    def setdefault(self, key, default=None):
        # BUGFIX: the inherited dict.setdefault inserted keys without
        # recording them in self._list, so they never showed up during
        # iteration; route the insertion through __setitem__ instead.
        if key not in self:
            self[key] = default
        return self[key]
    def keys(self):
        return self._list[:]
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        '''insert key at the given position in the iteration order'''
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
    def __repr__(self):
        if not self:
            return '%s()' % self.__class__.__name__
        return '%s(%r)' % (self.__class__.__name__, self.items())
578
604
class _lrucachenode(object):
    """One element of the doubly linked list backing lrucachedict.

    A node holds references to both neighbors plus the key/value pair
    for the corresponding dictionary entry.
    """
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        # neighbors are wired up by lrucachedict, not here
        self.next = None
        self.prev = None

        # ``_notset`` flags a node that currently carries no entry
        self.key = _notset
        self.value = None

    def markempty(self):
        """Flag this node as holding no cache entry."""
        self.key = _notset
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        self._cache = {}

        # the circular list starts as a single empty node linked to itself;
        # nodes are added lazily (see _addcapacity) up to ``max`` entries
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        # a lookup refreshes the entry's recency (raises KeyError on miss)
        node = self._cache[k]
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            # still growing: allocate a fresh node before the head
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        # NOTE: unlike __getitem__, a get() hit does NOT refresh recency
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # mark every live node empty; the linked list itself is reused
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
756
782
def lrucachefunc(func):
    '''cache most recent results of function calls

    Roughly the 21 most recently used argument combinations are kept;
    the least recently used entry is dropped when the cache grows past
    that bound.
    '''
    cache = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:
        # single-argument functions avoid packing/unpacking a tuple,
        # which saves a small amount of time per call
        def f(arg):
            if arg in cache:
                # refresh recency
                order.remove(arg)
            else:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args in cache:
                order.remove(args)
            else:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            order.append(args)
            return cache[args]

    return f
783
809
class propertycache(object):
    """Descriptor that computes an attribute once and caches it per instance.

    The first access runs the decorated function and stores the result in
    the instance ``__dict__`` under the function's name; later lookups hit
    the instance attribute directly and never reach this descriptor.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
796
822
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    # feed s on stdin and collect stdout; stderr is left alone
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, _err = proc.communicate(s)
    return out
803
829
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write the input to a temp file the command can read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        # reserve an output file name; the command itself writes to it
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # NOTE(review): on OpenVMS an odd exit status is treated as success
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort cleanup of both temporary files; a failure to
        # unlink must not mask the real result/exception
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
837
863
# map a filter-spec prefix to the function implementing that filter
# mechanism; specs without a known prefix fall back to pipefilter
# (see filter() below)
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
842
868
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # NOTE: intentionally shadows the builtin ``filter``; kept for
    # backward compatibility with existing callers
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return fn(s, cmd[len(prefix):].lstrip())
    # no recognized prefix: treat the whole spec as a shell pipe command
    return pipefilter(s, cmd)
849
875
def binary(s):
    """return true if a string is binary data"""
    # an embedded NUL is the heuristic for binary content
    if not s:
        return False
    return '\0' in s
853
879
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        # floor(log2(x)) for x > 0; defined as 0 for x == 0
        if not x:
            return 0
        return x.bit_length() - 1

    pending = []
    pendingsize = 0
    for chunk in source:
        pending.append(chunk)
        pendingsize += len(chunk)
        if pendingsize >= min:
            if min < max:
                # double the threshold, or jump straight to (the power of
                # two at or below) what we just accumulated if larger,
                # capped at max
                min = min << 1
                nmin = 1 << log2(pendingsize)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(pending)
            pending = []
            pendingsize = 0
    if pending:
        # flush whatever is left, even if below the threshold
        yield ''.join(pending)
884
910
# convenience alias: util callers raise Abort without importing error
Abort = error.Abort
886
912
def always(fn):
    '''constant predicate: match any input (ignores ``fn``)'''
    return True
889
915
def never(fn):
    '''constant predicate: match no input (ignores ``fn``)'''
    return False
892
918
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    if sys.version_info >= (2, 7):
        # nothing to work around on 2.7+: return the function untouched
        return func

    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the caller had GC on to begin with
            if wasenabled:
                gc.enable()
    return wrapper
916
942
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        # nothing to be relative to: just localize n2
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drive letters (Windows): no relative path exists,
            # fall back to the absolute location of n2 under root
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
942
968
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    # NOTE(review): ``imp`` is deprecated on Python 3; confirm an
    # importlib-based replacement for is_frozen when porting
    return (safehasattr(sys, "frozen") or # new py2exe
            safehasattr(sys, "importers") or # old py2exe
            imp.is_frozen(u"__main__")) # tools/freeze
952
978
# the location of data files matching the source code
if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(pycompat.sysexecutable)
else:
    datapath = os.path.dirname(pycompat.fsencode(__file__))

# point the i18n machinery at the directory holding translation data
i18n.setdatapath(datapath)
961
987
# cached path of the 'hg' executable; filled in lazily by hgexecutable()
_hgexecutable = None
963
989
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.

    The result is computed once and cached in the module-level
    ``_hgexecutable``; later calls return the cached value.
    """
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            # explicit $HG override wins
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # running directly from an 'hg' script
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            # last resort: search PATH, then fall back to argv[0]
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
987
1013
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # updates the module-level cache consulted by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
992
1018
def _isstdout(f):
    '''return True if file object ``f`` is the process's real stdout

    Objects without a ``fileno`` method (e.g. in-memory buffers) are
    never considered stdout.
    '''
    fileno = getattr(f, 'fileno', None)
    # always return a bool: the previous form ``fileno and ...`` leaked
    # ``None`` out when f had no fileno attribute (callers only ever
    # truth-tested the result, so this is truthiness-compatible)
    return fileno is not None and fileno() == sys.__stdout__.fileno()
996
1022
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # booleans and None become the shell-friendly '1'/'0'
        if val is True:
            return '1'
        if val is None or val is False:
            return '0'
        return str(val)

    env = dict(encoding.environ)
    if environ:
        env.update((name, py2shell(value))
                   for name, value in environ.iteritems())
    # always advertise the hg executable to child processes
    env['HG'] = hgexecutable()
    return env
1011
1037
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.

    Returns the child's exit code (0 normalized on OpenVMS success).
    '''
    try:
        # flush our own buffered output before the child writes to the
        # same descriptor, to keep output ordered
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
                                            and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = shellenviron(environ)
        if out is None or _isstdout(out):
            # child inherits our stdout/stderr directly
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture combined stdout+stderr and copy it line by line
            # into the caller-supplied file-like object
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in iter(proc.stdout.readline, ''):
                out.write(line)
            proc.wait()
            rc = proc.returncode
    # NOTE(review): on OpenVMS an odd status is treated as success
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        rc = 0
    return rc
1046
1072
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a one-frame traceback means the TypeError was raised by the
            # call itself (i.e. the arguments did not match func's
            # signature) rather than from inside func's body
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
1058
1084
# a whitelist of known filesystems where hardlink works reliably
# (fixes the "whilelist" typo; set literal instead of set([...]) —
# the file already requires Python 2.7, which supports set literals)
_hardlinkfswhitelist = {
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
}
1073
1099
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember the old stat so we can detect mtime ambiguity below
            oldstat = checkambig and filestat(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the symlink rather than copying its target
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
            if oldstat and oldstat.stat:
                newstat = filestat(dest)
                if newstat.isambig(oldstat):
                    # stat of copied file is ambiguous to original one;
                    # nudge mtime forward (wrapping at 2**31) so readers
                    # can tell the file changed
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1125
1151
1126 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
1152 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
1127 """Copy a directory tree using hardlinks if possible."""
1153 """Copy a directory tree using hardlinks if possible."""
1128 num = 0
1154 num = 0
1129
1155
1130 gettopic = lambda: hardlink and _('linking') or _('copying')
1156 gettopic = lambda: hardlink and _('linking') or _('copying')
1131
1157
1132 if os.path.isdir(src):
1158 if os.path.isdir(src):
1133 if hardlink is None:
1159 if hardlink is None:
1134 hardlink = (os.stat(src).st_dev ==
1160 hardlink = (os.stat(src).st_dev ==
1135 os.stat(os.path.dirname(dst)).st_dev)
1161 os.stat(os.path.dirname(dst)).st_dev)
1136 topic = gettopic()
1162 topic = gettopic()
1137 os.mkdir(dst)
1163 os.mkdir(dst)
1138 for name, kind in osutil.listdir(src):
1164 for name, kind in osutil.listdir(src):
1139 srcname = os.path.join(src, name)
1165 srcname = os.path.join(src, name)
1140 dstname = os.path.join(dst, name)
1166 dstname = os.path.join(dst, name)
1141 def nprog(t, pos):
1167 def nprog(t, pos):
1142 if pos is not None:
1168 if pos is not None:
1143 return progress(t, pos + num)
1169 return progress(t, pos + num)
1144 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
1170 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
1145 num += n
1171 num += n
1146 else:
1172 else:
1147 if hardlink is None:
1173 if hardlink is None:
1148 hardlink = (os.stat(os.path.dirname(src)).st_dev ==
1174 hardlink = (os.stat(os.path.dirname(src)).st_dev ==
1149 os.stat(os.path.dirname(dst)).st_dev)
1175 os.stat(os.path.dirname(dst)).st_dev)
1150 topic = gettopic()
1176 topic = gettopic()
1151
1177
1152 if hardlink:
1178 if hardlink:
1153 try:
1179 try:
1154 oslink(src, dst)
1180 oslink(src, dst)
1155 except (IOError, OSError):
1181 except (IOError, OSError):
1156 hardlink = False
1182 hardlink = False
1157 shutil.copy(src, dst)
1183 shutil.copy(src, dst)
1158 else:
1184 else:
1159 shutil.copy(src, dst)
1185 shutil.copy(src, dst)
1160 num += 1
1186 num += 1
1161 progress(topic, num)
1187 progress(topic, num)
1162 progress(topic, None)
1188 progress(topic, None)
1163
1189
1164 return hardlink, num
1190 return hardlink, num
1165
1191
1166 _winreservednames = '''con prn aux nul
1192 _winreservednames = '''con prn aux nul
1167 com1 com2 com3 com4 com5 com6 com7 com8 com9
1193 com1 com2 com3 com4 com5 com6 com7 com8 com9
1168 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
1194 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
1169 _winreservedchars = ':*?"<>|'
1195 _winreservedchars = ':*?"<>|'
1170 def checkwinfilename(path):
1196 def checkwinfilename(path):
1171 r'''Check that the base-relative path is a valid filename on Windows.
1197 r'''Check that the base-relative path is a valid filename on Windows.
1172 Returns None if the path is ok, or a UI string describing the problem.
1198 Returns None if the path is ok, or a UI string describing the problem.
1173
1199
1174 >>> checkwinfilename("just/a/normal/path")
1200 >>> checkwinfilename("just/a/normal/path")
1175 >>> checkwinfilename("foo/bar/con.xml")
1201 >>> checkwinfilename("foo/bar/con.xml")
1176 "filename contains 'con', which is reserved on Windows"
1202 "filename contains 'con', which is reserved on Windows"
1177 >>> checkwinfilename("foo/con.xml/bar")
1203 >>> checkwinfilename("foo/con.xml/bar")
1178 "filename contains 'con', which is reserved on Windows"
1204 "filename contains 'con', which is reserved on Windows"
1179 >>> checkwinfilename("foo/bar/xml.con")
1205 >>> checkwinfilename("foo/bar/xml.con")
1180 >>> checkwinfilename("foo/bar/AUX/bla.txt")
1206 >>> checkwinfilename("foo/bar/AUX/bla.txt")
1181 "filename contains 'AUX', which is reserved on Windows"
1207 "filename contains 'AUX', which is reserved on Windows"
1182 >>> checkwinfilename("foo/bar/bla:.txt")
1208 >>> checkwinfilename("foo/bar/bla:.txt")
1183 "filename contains ':', which is reserved on Windows"
1209 "filename contains ':', which is reserved on Windows"
1184 >>> checkwinfilename("foo/bar/b\07la.txt")
1210 >>> checkwinfilename("foo/bar/b\07la.txt")
1185 "filename contains '\\x07', which is invalid on Windows"
1211 "filename contains '\\x07', which is invalid on Windows"
1186 >>> checkwinfilename("foo/bar/bla ")
1212 >>> checkwinfilename("foo/bar/bla ")
1187 "filename ends with ' ', which is not allowed on Windows"
1213 "filename ends with ' ', which is not allowed on Windows"
1188 >>> checkwinfilename("../bar")
1214 >>> checkwinfilename("../bar")
1189 >>> checkwinfilename("foo\\")
1215 >>> checkwinfilename("foo\\")
1190 "filename ends with '\\', which is invalid on Windows"
1216 "filename ends with '\\', which is invalid on Windows"
1191 >>> checkwinfilename("foo\\/bar")
1217 >>> checkwinfilename("foo\\/bar")
1192 "directory name ends with '\\', which is invalid on Windows"
1218 "directory name ends with '\\', which is invalid on Windows"
1193 '''
1219 '''
1194 if path.endswith('\\'):
1220 if path.endswith('\\'):
1195 return _("filename ends with '\\', which is invalid on Windows")
1221 return _("filename ends with '\\', which is invalid on Windows")
1196 if '\\/' in path:
1222 if '\\/' in path:
1197 return _("directory name ends with '\\', which is invalid on Windows")
1223 return _("directory name ends with '\\', which is invalid on Windows")
1198 for n in path.replace('\\', '/').split('/'):
1224 for n in path.replace('\\', '/').split('/'):
1199 if not n:
1225 if not n:
1200 continue
1226 continue
1201 for c in pycompat.bytestr(n):
1227 for c in pycompat.bytestr(n):
1202 if c in _winreservedchars:
1228 if c in _winreservedchars:
1203 return _("filename contains '%s', which is reserved "
1229 return _("filename contains '%s', which is reserved "
1204 "on Windows") % c
1230 "on Windows") % c
1205 if ord(c) <= 31:
1231 if ord(c) <= 31:
1206 return _("filename contains %r, which is invalid "
1232 return _("filename contains %r, which is invalid "
1207 "on Windows") % c
1233 "on Windows") % c
1208 base = n.split('.')[0]
1234 base = n.split('.')[0]
1209 if base and base.lower() in _winreservednames:
1235 if base and base.lower() in _winreservednames:
1210 return _("filename contains '%s', which is reserved "
1236 return _("filename contains '%s', which is reserved "
1211 "on Windows") % base
1237 "on Windows") % base
1212 t = n[-1]
1238 t = n[-1]
1213 if t in '. ' and n not in '..':
1239 if t in '. ' and n not in '..':
1214 return _("filename ends with '%s', which is not allowed "
1240 return _("filename ends with '%s', which is not allowed "
1215 "on Windows") % t
1241 "on Windows") % t
1216
1242
1217 if pycompat.osname == 'nt':
1243 if pycompat.osname == 'nt':
1218 checkosfilename = checkwinfilename
1244 checkosfilename = checkwinfilename
1219 timer = time.clock
1245 timer = time.clock
1220 else:
1246 else:
1221 checkosfilename = platform.checkosfilename
1247 checkosfilename = platform.checkosfilename
1222 timer = time.time
1248 timer = time.time
1223
1249
1224 if safehasattr(time, "perf_counter"):
1250 if safehasattr(time, "perf_counter"):
1225 timer = time.perf_counter
1251 timer = time.perf_counter
1226
1252
1227 def makelock(info, pathname):
1253 def makelock(info, pathname):
1228 try:
1254 try:
1229 return os.symlink(info, pathname)
1255 return os.symlink(info, pathname)
1230 except OSError as why:
1256 except OSError as why:
1231 if why.errno == errno.EEXIST:
1257 if why.errno == errno.EEXIST:
1232 raise
1258 raise
1233 except AttributeError: # no symlink in os
1259 except AttributeError: # no symlink in os
1234 pass
1260 pass
1235
1261
1236 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
1262 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
1237 os.write(ld, info)
1263 os.write(ld, info)
1238 os.close(ld)
1264 os.close(ld)
1239
1265
1240 def readlock(pathname):
1266 def readlock(pathname):
1241 try:
1267 try:
1242 return os.readlink(pathname)
1268 return os.readlink(pathname)
1243 except OSError as why:
1269 except OSError as why:
1244 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1270 if why.errno not in (errno.EINVAL, errno.ENOSYS):
1245 raise
1271 raise
1246 except AttributeError: # no symlink in os
1272 except AttributeError: # no symlink in os
1247 pass
1273 pass
1248 fp = posixfile(pathname)
1274 fp = posixfile(pathname)
1249 r = fp.read()
1275 r = fp.read()
1250 fp.close()
1276 fp.close()
1251 return r
1277 return r
1252
1278
1253 def fstat(fp):
1279 def fstat(fp):
1254 '''stat file object that may not have fileno method.'''
1280 '''stat file object that may not have fileno method.'''
1255 try:
1281 try:
1256 return os.fstat(fp.fileno())
1282 return os.fstat(fp.fileno())
1257 except AttributeError:
1283 except AttributeError:
1258 return os.stat(fp.name)
1284 return os.stat(fp.name)
1259
1285
1260 # File system features
1286 # File system features
1261
1287
1262 def fscasesensitive(path):
1288 def fscasesensitive(path):
1263 """
1289 """
1264 Return true if the given path is on a case-sensitive filesystem
1290 Return true if the given path is on a case-sensitive filesystem
1265
1291
1266 Requires a path (like /foo/.hg) ending with a foldable final
1292 Requires a path (like /foo/.hg) ending with a foldable final
1267 directory component.
1293 directory component.
1268 """
1294 """
1269 s1 = os.lstat(path)
1295 s1 = os.lstat(path)
1270 d, b = os.path.split(path)
1296 d, b = os.path.split(path)
1271 b2 = b.upper()
1297 b2 = b.upper()
1272 if b == b2:
1298 if b == b2:
1273 b2 = b.lower()
1299 b2 = b.lower()
1274 if b == b2:
1300 if b == b2:
1275 return True # no evidence against case sensitivity
1301 return True # no evidence against case sensitivity
1276 p2 = os.path.join(d, b2)
1302 p2 = os.path.join(d, b2)
1277 try:
1303 try:
1278 s2 = os.lstat(p2)
1304 s2 = os.lstat(p2)
1279 if s2 == s1:
1305 if s2 == s1:
1280 return False
1306 return False
1281 return True
1307 return True
1282 except OSError:
1308 except OSError:
1283 return True
1309 return True
1284
1310
1285 try:
1311 try:
1286 import re2
1312 import re2
1287 _re2 = None
1313 _re2 = None
1288 except ImportError:
1314 except ImportError:
1289 _re2 = False
1315 _re2 = False
1290
1316
1291 class _re(object):
1317 class _re(object):
1292 def _checkre2(self):
1318 def _checkre2(self):
1293 global _re2
1319 global _re2
1294 try:
1320 try:
1295 # check if match works, see issue3964
1321 # check if match works, see issue3964
1296 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1322 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
1297 except ImportError:
1323 except ImportError:
1298 _re2 = False
1324 _re2 = False
1299
1325
1300 def compile(self, pat, flags=0):
1326 def compile(self, pat, flags=0):
1301 '''Compile a regular expression, using re2 if possible
1327 '''Compile a regular expression, using re2 if possible
1302
1328
1303 For best performance, use only re2-compatible regexp features. The
1329 For best performance, use only re2-compatible regexp features. The
1304 only flags from the re module that are re2-compatible are
1330 only flags from the re module that are re2-compatible are
1305 IGNORECASE and MULTILINE.'''
1331 IGNORECASE and MULTILINE.'''
1306 if _re2 is None:
1332 if _re2 is None:
1307 self._checkre2()
1333 self._checkre2()
1308 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1334 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
1309 if flags & remod.IGNORECASE:
1335 if flags & remod.IGNORECASE:
1310 pat = '(?i)' + pat
1336 pat = '(?i)' + pat
1311 if flags & remod.MULTILINE:
1337 if flags & remod.MULTILINE:
1312 pat = '(?m)' + pat
1338 pat = '(?m)' + pat
1313 try:
1339 try:
1314 return re2.compile(pat)
1340 return re2.compile(pat)
1315 except re2.error:
1341 except re2.error:
1316 pass
1342 pass
1317 return remod.compile(pat, flags)
1343 return remod.compile(pat, flags)
1318
1344
1319 @propertycache
1345 @propertycache
1320 def escape(self):
1346 def escape(self):
1321 '''Return the version of escape corresponding to self.compile.
1347 '''Return the version of escape corresponding to self.compile.
1322
1348
1323 This is imperfect because whether re2 or re is used for a particular
1349 This is imperfect because whether re2 or re is used for a particular
1324 function depends on the flags, etc, but it's the best we can do.
1350 function depends on the flags, etc, but it's the best we can do.
1325 '''
1351 '''
1326 global _re2
1352 global _re2
1327 if _re2 is None:
1353 if _re2 is None:
1328 self._checkre2()
1354 self._checkre2()
1329 if _re2:
1355 if _re2:
1330 return re2.escape
1356 return re2.escape
1331 else:
1357 else:
1332 return remod.escape
1358 return remod.escape
1333
1359
1334 re = _re()
1360 re = _re()
1335
1361
1336 _fspathcache = {}
1362 _fspathcache = {}
1337 def fspath(name, root):
1363 def fspath(name, root):
1338 '''Get name in the case stored in the filesystem
1364 '''Get name in the case stored in the filesystem
1339
1365
1340 The name should be relative to root, and be normcase-ed for efficiency.
1366 The name should be relative to root, and be normcase-ed for efficiency.
1341
1367
1342 Note that this function is unnecessary, and should not be
1368 Note that this function is unnecessary, and should not be
1343 called, for case-sensitive filesystems (simply because it's expensive).
1369 called, for case-sensitive filesystems (simply because it's expensive).
1344
1370
1345 The root should be normcase-ed, too.
1371 The root should be normcase-ed, too.
1346 '''
1372 '''
1347 def _makefspathcacheentry(dir):
1373 def _makefspathcacheentry(dir):
1348 return dict((normcase(n), n) for n in os.listdir(dir))
1374 return dict((normcase(n), n) for n in os.listdir(dir))
1349
1375
1350 seps = pycompat.ossep
1376 seps = pycompat.ossep
1351 if pycompat.osaltsep:
1377 if pycompat.osaltsep:
1352 seps = seps + pycompat.osaltsep
1378 seps = seps + pycompat.osaltsep
1353 # Protect backslashes. This gets silly very quickly.
1379 # Protect backslashes. This gets silly very quickly.
1354 seps.replace('\\','\\\\')
1380 seps.replace('\\','\\\\')
1355 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1381 pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
1356 dir = os.path.normpath(root)
1382 dir = os.path.normpath(root)
1357 result = []
1383 result = []
1358 for part, sep in pattern.findall(name):
1384 for part, sep in pattern.findall(name):
1359 if sep:
1385 if sep:
1360 result.append(sep)
1386 result.append(sep)
1361 continue
1387 continue
1362
1388
1363 if dir not in _fspathcache:
1389 if dir not in _fspathcache:
1364 _fspathcache[dir] = _makefspathcacheentry(dir)
1390 _fspathcache[dir] = _makefspathcacheentry(dir)
1365 contents = _fspathcache[dir]
1391 contents = _fspathcache[dir]
1366
1392
1367 found = contents.get(part)
1393 found = contents.get(part)
1368 if not found:
1394 if not found:
1369 # retry "once per directory" per "dirstate.walk" which
1395 # retry "once per directory" per "dirstate.walk" which
1370 # may take place for each patches of "hg qpush", for example
1396 # may take place for each patches of "hg qpush", for example
1371 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1397 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
1372 found = contents.get(part)
1398 found = contents.get(part)
1373
1399
1374 result.append(found or part)
1400 result.append(found or part)
1375 dir = os.path.join(dir, part)
1401 dir = os.path.join(dir, part)
1376
1402
1377 return ''.join(result)
1403 return ''.join(result)
1378
1404
1379 def getfstype(dirpath):
1405 def getfstype(dirpath):
1380 '''Get the filesystem type name from a directory (best-effort)
1406 '''Get the filesystem type name from a directory (best-effort)
1381
1407
1382 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
1408 Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
1383 '''
1409 '''
1384 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
1410 return getattr(osutil, 'getfstype', lambda x: None)(dirpath)
1385
1411
1386 def checknlink(testfile):
1412 def checknlink(testfile):
1387 '''check whether hardlink count reporting works properly'''
1413 '''check whether hardlink count reporting works properly'''
1388
1414
1389 # testfile may be open, so we need a separate file for checking to
1415 # testfile may be open, so we need a separate file for checking to
1390 # work around issue2543 (or testfile may get lost on Samba shares)
1416 # work around issue2543 (or testfile may get lost on Samba shares)
1391 f1 = testfile + ".hgtmp1"
1417 f1 = testfile + ".hgtmp1"
1392 if os.path.lexists(f1):
1418 if os.path.lexists(f1):
1393 return False
1419 return False
1394 try:
1420 try:
1395 posixfile(f1, 'w').close()
1421 posixfile(f1, 'w').close()
1396 except IOError:
1422 except IOError:
1397 try:
1423 try:
1398 os.unlink(f1)
1424 os.unlink(f1)
1399 except OSError:
1425 except OSError:
1400 pass
1426 pass
1401 return False
1427 return False
1402
1428
1403 f2 = testfile + ".hgtmp2"
1429 f2 = testfile + ".hgtmp2"
1404 fd = None
1430 fd = None
1405 try:
1431 try:
1406 oslink(f1, f2)
1432 oslink(f1, f2)
1407 # nlinks() may behave differently for files on Windows shares if
1433 # nlinks() may behave differently for files on Windows shares if
1408 # the file is open.
1434 # the file is open.
1409 fd = posixfile(f2)
1435 fd = posixfile(f2)
1410 return nlinks(f2) > 1
1436 return nlinks(f2) > 1
1411 except OSError:
1437 except OSError:
1412 return False
1438 return False
1413 finally:
1439 finally:
1414 if fd is not None:
1440 if fd is not None:
1415 fd.close()
1441 fd.close()
1416 for f in (f1, f2):
1442 for f in (f1, f2):
1417 try:
1443 try:
1418 os.unlink(f)
1444 os.unlink(f)
1419 except OSError:
1445 except OSError:
1420 pass
1446 pass
1421
1447
1422 def endswithsep(path):
1448 def endswithsep(path):
1423 '''Check path ends with os.sep or os.altsep.'''
1449 '''Check path ends with os.sep or os.altsep.'''
1424 return (path.endswith(pycompat.ossep)
1450 return (path.endswith(pycompat.ossep)
1425 or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
1451 or pycompat.osaltsep and path.endswith(pycompat.osaltsep))
1426
1452
1427 def splitpath(path):
1453 def splitpath(path):
1428 '''Split path by os.sep.
1454 '''Split path by os.sep.
1429 Note that this function does not use os.altsep because this is
1455 Note that this function does not use os.altsep because this is
1430 an alternative of simple "xxx.split(os.sep)".
1456 an alternative of simple "xxx.split(os.sep)".
1431 It is recommended to use os.path.normpath() before using this
1457 It is recommended to use os.path.normpath() before using this
1432 function if need.'''
1458 function if need.'''
1433 return path.split(pycompat.ossep)
1459 return path.split(pycompat.ossep)
1434
1460
1435 def gui():
1461 def gui():
1436 '''Are we running in a GUI?'''
1462 '''Are we running in a GUI?'''
1437 if pycompat.sysplatform == 'darwin':
1463 if pycompat.sysplatform == 'darwin':
1438 if 'SSH_CONNECTION' in encoding.environ:
1464 if 'SSH_CONNECTION' in encoding.environ:
1439 # handle SSH access to a box where the user is logged in
1465 # handle SSH access to a box where the user is logged in
1440 return False
1466 return False
1441 elif getattr(osutil, 'isgui', None):
1467 elif getattr(osutil, 'isgui', None):
1442 # check if a CoreGraphics session is available
1468 # check if a CoreGraphics session is available
1443 return osutil.isgui()
1469 return osutil.isgui()
1444 else:
1470 else:
1445 # pure build; use a safe default
1471 # pure build; use a safe default
1446 return True
1472 return True
1447 else:
1473 else:
1448 return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
1474 return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
1449
1475
1450 def mktempcopy(name, emptyok=False, createmode=None):
1476 def mktempcopy(name, emptyok=False, createmode=None):
1451 """Create a temporary file with the same contents from name
1477 """Create a temporary file with the same contents from name
1452
1478
1453 The permission bits are copied from the original file.
1479 The permission bits are copied from the original file.
1454
1480
1455 If the temporary file is going to be truncated immediately, you
1481 If the temporary file is going to be truncated immediately, you
1456 can use emptyok=True as an optimization.
1482 can use emptyok=True as an optimization.
1457
1483
1458 Returns the name of the temporary file.
1484 Returns the name of the temporary file.
1459 """
1485 """
1460 d, fn = os.path.split(name)
1486 d, fn = os.path.split(name)
1461 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1487 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1462 os.close(fd)
1488 os.close(fd)
1463 # Temporary files are created with mode 0600, which is usually not
1489 # Temporary files are created with mode 0600, which is usually not
1464 # what we want. If the original file already exists, just copy
1490 # what we want. If the original file already exists, just copy
1465 # its mode. Otherwise, manually obey umask.
1491 # its mode. Otherwise, manually obey umask.
1466 copymode(name, temp, createmode)
1492 copymode(name, temp, createmode)
1467 if emptyok:
1493 if emptyok:
1468 return temp
1494 return temp
1469 try:
1495 try:
1470 try:
1496 try:
1471 ifp = posixfile(name, "rb")
1497 ifp = posixfile(name, "rb")
1472 except IOError as inst:
1498 except IOError as inst:
1473 if inst.errno == errno.ENOENT:
1499 if inst.errno == errno.ENOENT:
1474 return temp
1500 return temp
1475 if not getattr(inst, 'filename', None):
1501 if not getattr(inst, 'filename', None):
1476 inst.filename = name
1502 inst.filename = name
1477 raise
1503 raise
1478 ofp = posixfile(temp, "wb")
1504 ofp = posixfile(temp, "wb")
1479 for chunk in filechunkiter(ifp):
1505 for chunk in filechunkiter(ifp):
1480 ofp.write(chunk)
1506 ofp.write(chunk)
1481 ifp.close()
1507 ifp.close()
1482 ofp.close()
1508 ofp.close()
1483 except: # re-raises
1509 except: # re-raises
1484 try: os.unlink(temp)
1510 try: os.unlink(temp)
1485 except OSError: pass
1511 except OSError: pass
1486 raise
1512 raise
1487 return temp
1513 return temp
1488
1514
1489 class filestat(object):
1515 class filestat(object):
1490 """help to exactly detect change of a file
1516 """help to exactly detect change of a file
1491
1517
1492 'stat' attribute is result of 'os.stat()' if specified 'path'
1518 'stat' attribute is result of 'os.stat()' if specified 'path'
1493 exists. Otherwise, it is None. This can avoid preparative
1519 exists. Otherwise, it is None. This can avoid preparative
1494 'exists()' examination on client side of this class.
1520 'exists()' examination on client side of this class.
1495 """
1521 """
1496 def __init__(self, path):
1522 def __init__(self, path):
1497 try:
1523 try:
1498 self.stat = os.stat(path)
1524 self.stat = os.stat(path)
1499 except OSError as err:
1525 except OSError as err:
1500 if err.errno != errno.ENOENT:
1526 if err.errno != errno.ENOENT:
1501 raise
1527 raise
1502 self.stat = None
1528 self.stat = None
1503
1529
1504 __hash__ = object.__hash__
1530 __hash__ = object.__hash__
1505
1531
1506 def __eq__(self, old):
1532 def __eq__(self, old):
1507 try:
1533 try:
1508 # if ambiguity between stat of new and old file is
1534 # if ambiguity between stat of new and old file is
1509 # avoided, comparison of size, ctime and mtime is enough
1535 # avoided, comparison of size, ctime and mtime is enough
1510 # to exactly detect change of a file regardless of platform
1536 # to exactly detect change of a file regardless of platform
1511 return (self.stat.st_size == old.stat.st_size and
1537 return (self.stat.st_size == old.stat.st_size and
1512 self.stat.st_ctime == old.stat.st_ctime and
1538 self.stat.st_ctime == old.stat.st_ctime and
1513 self.stat.st_mtime == old.stat.st_mtime)
1539 self.stat.st_mtime == old.stat.st_mtime)
1514 except AttributeError:
1540 except AttributeError:
1515 return False
1541 return False
1516
1542
1517 def isambig(self, old):
1543 def isambig(self, old):
1518 """Examine whether new (= self) stat is ambiguous against old one
1544 """Examine whether new (= self) stat is ambiguous against old one
1519
1545
1520 "S[N]" below means stat of a file at N-th change:
1546 "S[N]" below means stat of a file at N-th change:
1521
1547
1522 - S[n-1].ctime < S[n].ctime: can detect change of a file
1548 - S[n-1].ctime < S[n].ctime: can detect change of a file
1523 - S[n-1].ctime == S[n].ctime
1549 - S[n-1].ctime == S[n].ctime
1524 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1550 - S[n-1].ctime < S[n].mtime: means natural advancing (*1)
1525 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1551 - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
1526 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1552 - S[n-1].ctime > S[n].mtime: never occurs naturally (don't care)
1527 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1553 - S[n-1].ctime > S[n].ctime: never occurs naturally (don't care)
1528
1554
1529 Case (*2) above means that a file was changed twice or more at
1555 Case (*2) above means that a file was changed twice or more at
1530 same time in sec (= S[n-1].ctime), and comparison of timestamp
1556 same time in sec (= S[n-1].ctime), and comparison of timestamp
1531 is ambiguous.
1557 is ambiguous.
1532
1558
1533 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1559 Base idea to avoid such ambiguity is "advance mtime 1 sec, if
1534 timestamp is ambiguous".
1560 timestamp is ambiguous".
1535
1561
1536 But advancing mtime only in case (*2) doesn't work as
1562 But advancing mtime only in case (*2) doesn't work as
1537 expected, because naturally advanced S[n].mtime in case (*1)
1563 expected, because naturally advanced S[n].mtime in case (*1)
1538 might be equal to manually advanced S[n-1 or earlier].mtime.
1564 might be equal to manually advanced S[n-1 or earlier].mtime.
1539
1565
1540 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1566 Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
1541 treated as ambiguous regardless of mtime, to avoid overlooking
1567 treated as ambiguous regardless of mtime, to avoid overlooking
1542 by confliction between such mtime.
1568 by confliction between such mtime.
1543
1569
1544 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1570 Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
1545 S[n].mtime", even if size of a file isn't changed.
1571 S[n].mtime", even if size of a file isn't changed.
1546 """
1572 """
1547 try:
1573 try:
1548 return (self.stat.st_ctime == old.stat.st_ctime)
1574 return (self.stat.st_ctime == old.stat.st_ctime)
1549 except AttributeError:
1575 except AttributeError:
1550 return False
1576 return False
1551
1577
1552 def avoidambig(self, path, old):
1578 def avoidambig(self, path, old):
1553 """Change file stat of specified path to avoid ambiguity
1579 """Change file stat of specified path to avoid ambiguity
1554
1580
1555 'old' should be previous filestat of 'path'.
1581 'old' should be previous filestat of 'path'.
1556
1582
1557 This skips avoiding ambiguity, if a process doesn't have
1583 This skips avoiding ambiguity, if a process doesn't have
1558 appropriate privileges for 'path'.
1584 appropriate privileges for 'path'.
1559 """
1585 """
1560 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1586 advanced = (old.stat.st_mtime + 1) & 0x7fffffff
1561 try:
1587 try:
1562 os.utime(path, (advanced, advanced))
1588 os.utime(path, (advanced, advanced))
1563 except OSError as inst:
1589 except OSError as inst:
1564 if inst.errno == errno.EPERM:
1590 if inst.errno == errno.EPERM:
1565 # utime() on the file created by another user causes EPERM,
1591 # utime() on the file created by another user causes EPERM,
1566 # if a process doesn't have appropriate privileges
1592 # if a process doesn't have appropriate privileges
1567 return
1593 return
1568 raise
1594 raise
1569
1595
1570 def __ne__(self, other):
1596 def __ne__(self, other):
1571 return not self == other
1597 return not self == other
1572
1598
1573 class atomictempfile(object):
1599 class atomictempfile(object):
1574 '''writable file object that atomically updates a file
1600 '''writable file object that atomically updates a file
1575
1601
1576 All writes will go to a temporary copy of the original file. Call
1602 All writes will go to a temporary copy of the original file. Call
1577 close() when you are done writing, and atomictempfile will rename
1603 close() when you are done writing, and atomictempfile will rename
1578 the temporary copy to the original name, making the changes
1604 the temporary copy to the original name, making the changes
1579 visible. If the object is destroyed without being closed, all your
1605 visible. If the object is destroyed without being closed, all your
1580 writes are discarded.
1606 writes are discarded.
1581
1607
1582 checkambig argument of constructor is used with filestat, and is
1608 checkambig argument of constructor is used with filestat, and is
1583 useful only if target file is guarded by any lock (e.g. repo.lock
1609 useful only if target file is guarded by any lock (e.g. repo.lock
1584 or repo.wlock).
1610 or repo.wlock).
1585 '''
1611 '''
1586 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1612 def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
1587 self.__name = name # permanent name
1613 self.__name = name # permanent name
1588 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1614 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1589 createmode=createmode)
1615 createmode=createmode)
1590 self._fp = posixfile(self._tempname, mode)
1616 self._fp = posixfile(self._tempname, mode)
1591 self._checkambig = checkambig
1617 self._checkambig = checkambig
1592
1618
1593 # delegated methods
1619 # delegated methods
1594 self.read = self._fp.read
1620 self.read = self._fp.read
1595 self.write = self._fp.write
1621 self.write = self._fp.write
1596 self.seek = self._fp.seek
1622 self.seek = self._fp.seek
1597 self.tell = self._fp.tell
1623 self.tell = self._fp.tell
1598 self.fileno = self._fp.fileno
1624 self.fileno = self._fp.fileno
1599
1625
1600 def close(self):
1626 def close(self):
1601 if not self._fp.closed:
1627 if not self._fp.closed:
1602 self._fp.close()
1628 self._fp.close()
1603 filename = localpath(self.__name)
1629 filename = localpath(self.__name)
1604 oldstat = self._checkambig and filestat(filename)
1630 oldstat = self._checkambig and filestat(filename)
1605 if oldstat and oldstat.stat:
1631 if oldstat and oldstat.stat:
1606 rename(self._tempname, filename)
1632 rename(self._tempname, filename)
1607 newstat = filestat(filename)
1633 newstat = filestat(filename)
1608 if newstat.isambig(oldstat):
1634 if newstat.isambig(oldstat):
1609 # stat of changed file is ambiguous to original one
1635 # stat of changed file is ambiguous to original one
1610 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1636 advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
1611 os.utime(filename, (advanced, advanced))
1637 os.utime(filename, (advanced, advanced))
1612 else:
1638 else:
1613 rename(self._tempname, filename)
1639 rename(self._tempname, filename)
1614
1640
1615 def discard(self):
1641 def discard(self):
1616 if not self._fp.closed:
1642 if not self._fp.closed:
1617 try:
1643 try:
1618 os.unlink(self._tempname)
1644 os.unlink(self._tempname)
1619 except OSError:
1645 except OSError:
1620 pass
1646 pass
1621 self._fp.close()
1647 self._fp.close()
1622
1648
1623 def __del__(self):
1649 def __del__(self):
1624 if safehasattr(self, '_fp'): # constructor actually did something
1650 if safehasattr(self, '_fp'): # constructor actually did something
1625 self.discard()
1651 self.discard()
1626
1652
1627 def __enter__(self):
1653 def __enter__(self):
1628 return self
1654 return self
1629
1655
1630 def __exit__(self, exctype, excvalue, traceback):
1656 def __exit__(self, exctype, excvalue, traceback):
1631 if exctype is not None:
1657 if exctype is not None:
1632 self.discard()
1658 self.discard()
1633 else:
1659 else:
1634 self.close()
1660 self.close()
1635
1661
1636 def unlinkpath(f, ignoremissing=False):
1662 def unlinkpath(f, ignoremissing=False):
1637 """unlink and remove the directory if it is empty"""
1663 """unlink and remove the directory if it is empty"""
1638 if ignoremissing:
1664 if ignoremissing:
1639 tryunlink(f)
1665 tryunlink(f)
1640 else:
1666 else:
1641 unlink(f)
1667 unlink(f)
1642 # try removing directories that might now be empty
1668 # try removing directories that might now be empty
1643 try:
1669 try:
1644 removedirs(os.path.dirname(f))
1670 removedirs(os.path.dirname(f))
1645 except OSError:
1671 except OSError:
1646 pass
1672 pass
1647
1673
1648 def tryunlink(f):
1674 def tryunlink(f):
1649 """Attempt to remove a file, ignoring ENOENT errors."""
1675 """Attempt to remove a file, ignoring ENOENT errors."""
1650 try:
1676 try:
1651 unlink(f)
1677 unlink(f)
1652 except OSError as e:
1678 except OSError as e:
1653 if e.errno != errno.ENOENT:
1679 if e.errno != errno.ENOENT:
1654 raise
1680 raise
1655
1681
1656 def makedirs(name, mode=None, notindexed=False):
1682 def makedirs(name, mode=None, notindexed=False):
1657 """recursive directory creation with parent mode inheritance
1683 """recursive directory creation with parent mode inheritance
1658
1684
1659 Newly created directories are marked as "not to be indexed by
1685 Newly created directories are marked as "not to be indexed by
1660 the content indexing service", if ``notindexed`` is specified
1686 the content indexing service", if ``notindexed`` is specified
1661 for "write" mode access.
1687 for "write" mode access.
1662 """
1688 """
1663 try:
1689 try:
1664 makedir(name, notindexed)
1690 makedir(name, notindexed)
1665 except OSError as err:
1691 except OSError as err:
1666 if err.errno == errno.EEXIST:
1692 if err.errno == errno.EEXIST:
1667 return
1693 return
1668 if err.errno != errno.ENOENT or not name:
1694 if err.errno != errno.ENOENT or not name:
1669 raise
1695 raise
1670 parent = os.path.dirname(os.path.abspath(name))
1696 parent = os.path.dirname(os.path.abspath(name))
1671 if parent == name:
1697 if parent == name:
1672 raise
1698 raise
1673 makedirs(parent, mode, notindexed)
1699 makedirs(parent, mode, notindexed)
1674 try:
1700 try:
1675 makedir(name, notindexed)
1701 makedir(name, notindexed)
1676 except OSError as err:
1702 except OSError as err:
1677 # Catch EEXIST to handle races
1703 # Catch EEXIST to handle races
1678 if err.errno == errno.EEXIST:
1704 if err.errno == errno.EEXIST:
1679 return
1705 return
1680 raise
1706 raise
1681 if mode is not None:
1707 if mode is not None:
1682 os.chmod(name, mode)
1708 os.chmod(name, mode)
1683
1709
1684 def readfile(path):
1710 def readfile(path):
1685 with open(path, 'rb') as fp:
1711 with open(path, 'rb') as fp:
1686 return fp.read()
1712 return fp.read()
1687
1713
1688 def writefile(path, text):
1714 def writefile(path, text):
1689 with open(path, 'wb') as fp:
1715 with open(path, 'wb') as fp:
1690 fp.write(text)
1716 fp.write(text)
1691
1717
1692 def appendfile(path, text):
1718 def appendfile(path, text):
1693 with open(path, 'ab') as fp:
1719 with open(path, 'ab') as fp:
1694 fp.write(text)
1720 fp.write(text)
1695
1721
1696 class chunkbuffer(object):
1722 class chunkbuffer(object):
1697 """Allow arbitrary sized chunks of data to be efficiently read from an
1723 """Allow arbitrary sized chunks of data to be efficiently read from an
1698 iterator over chunks of arbitrary size."""
1724 iterator over chunks of arbitrary size."""
1699
1725
1700 def __init__(self, in_iter):
1726 def __init__(self, in_iter):
1701 """in_iter is the iterator that's iterating over the input chunks.
1727 """in_iter is the iterator that's iterating over the input chunks.
1702 targetsize is how big a buffer to try to maintain."""
1728 targetsize is how big a buffer to try to maintain."""
1703 def splitbig(chunks):
1729 def splitbig(chunks):
1704 for chunk in chunks:
1730 for chunk in chunks:
1705 if len(chunk) > 2**20:
1731 if len(chunk) > 2**20:
1706 pos = 0
1732 pos = 0
1707 while pos < len(chunk):
1733 while pos < len(chunk):
1708 end = pos + 2 ** 18
1734 end = pos + 2 ** 18
1709 yield chunk[pos:end]
1735 yield chunk[pos:end]
1710 pos = end
1736 pos = end
1711 else:
1737 else:
1712 yield chunk
1738 yield chunk
1713 self.iter = splitbig(in_iter)
1739 self.iter = splitbig(in_iter)
1714 self._queue = collections.deque()
1740 self._queue = collections.deque()
1715 self._chunkoffset = 0
1741 self._chunkoffset = 0
1716
1742
1717 def read(self, l=None):
1743 def read(self, l=None):
1718 """Read L bytes of data from the iterator of chunks of data.
1744 """Read L bytes of data from the iterator of chunks of data.
1719 Returns less than L bytes if the iterator runs dry.
1745 Returns less than L bytes if the iterator runs dry.
1720
1746
1721 If size parameter is omitted, read everything"""
1747 If size parameter is omitted, read everything"""
1722 if l is None:
1748 if l is None:
1723 return ''.join(self.iter)
1749 return ''.join(self.iter)
1724
1750
1725 left = l
1751 left = l
1726 buf = []
1752 buf = []
1727 queue = self._queue
1753 queue = self._queue
1728 while left > 0:
1754 while left > 0:
1729 # refill the queue
1755 # refill the queue
1730 if not queue:
1756 if not queue:
1731 target = 2**18
1757 target = 2**18
1732 for chunk in self.iter:
1758 for chunk in self.iter:
1733 queue.append(chunk)
1759 queue.append(chunk)
1734 target -= len(chunk)
1760 target -= len(chunk)
1735 if target <= 0:
1761 if target <= 0:
1736 break
1762 break
1737 if not queue:
1763 if not queue:
1738 break
1764 break
1739
1765
1740 # The easy way to do this would be to queue.popleft(), modify the
1766 # The easy way to do this would be to queue.popleft(), modify the
1741 # chunk (if necessary), then queue.appendleft(). However, for cases
1767 # chunk (if necessary), then queue.appendleft(). However, for cases
1742 # where we read partial chunk content, this incurs 2 dequeue
1768 # where we read partial chunk content, this incurs 2 dequeue
1743 # mutations and creates a new str for the remaining chunk in the
1769 # mutations and creates a new str for the remaining chunk in the
1744 # queue. Our code below avoids this overhead.
1770 # queue. Our code below avoids this overhead.
1745
1771
1746 chunk = queue[0]
1772 chunk = queue[0]
1747 chunkl = len(chunk)
1773 chunkl = len(chunk)
1748 offset = self._chunkoffset
1774 offset = self._chunkoffset
1749
1775
1750 # Use full chunk.
1776 # Use full chunk.
1751 if offset == 0 and left >= chunkl:
1777 if offset == 0 and left >= chunkl:
1752 left -= chunkl
1778 left -= chunkl
1753 queue.popleft()
1779 queue.popleft()
1754 buf.append(chunk)
1780 buf.append(chunk)
1755 # self._chunkoffset remains at 0.
1781 # self._chunkoffset remains at 0.
1756 continue
1782 continue
1757
1783
1758 chunkremaining = chunkl - offset
1784 chunkremaining = chunkl - offset
1759
1785
1760 # Use all of unconsumed part of chunk.
1786 # Use all of unconsumed part of chunk.
1761 if left >= chunkremaining:
1787 if left >= chunkremaining:
1762 left -= chunkremaining
1788 left -= chunkremaining
1763 queue.popleft()
1789 queue.popleft()
1764 # offset == 0 is enabled by block above, so this won't merely
1790 # offset == 0 is enabled by block above, so this won't merely
1765 # copy via ``chunk[0:]``.
1791 # copy via ``chunk[0:]``.
1766 buf.append(chunk[offset:])
1792 buf.append(chunk[offset:])
1767 self._chunkoffset = 0
1793 self._chunkoffset = 0
1768
1794
1769 # Partial chunk needed.
1795 # Partial chunk needed.
1770 else:
1796 else:
1771 buf.append(chunk[offset:offset + left])
1797 buf.append(chunk[offset:offset + left])
1772 self._chunkoffset += left
1798 self._chunkoffset += left
1773 left -= chunkremaining
1799 left -= chunkremaining
1774
1800
1775 return ''.join(buf)
1801 return ''.join(buf)
1776
1802
1777 def filechunkiter(f, size=131072, limit=None):
1803 def filechunkiter(f, size=131072, limit=None):
1778 """Create a generator that produces the data in the file size
1804 """Create a generator that produces the data in the file size
1779 (default 131072) bytes at a time, up to optional limit (default is
1805 (default 131072) bytes at a time, up to optional limit (default is
1780 to read all data). Chunks may be less than size bytes if the
1806 to read all data). Chunks may be less than size bytes if the
1781 chunk is the last chunk in the file, or the file is a socket or
1807 chunk is the last chunk in the file, or the file is a socket or
1782 some other type of file that sometimes reads less data than is
1808 some other type of file that sometimes reads less data than is
1783 requested."""
1809 requested."""
1784 assert size >= 0
1810 assert size >= 0
1785 assert limit is None or limit >= 0
1811 assert limit is None or limit >= 0
1786 while True:
1812 while True:
1787 if limit is None:
1813 if limit is None:
1788 nbytes = size
1814 nbytes = size
1789 else:
1815 else:
1790 nbytes = min(limit, size)
1816 nbytes = min(limit, size)
1791 s = nbytes and f.read(nbytes)
1817 s = nbytes and f.read(nbytes)
1792 if not s:
1818 if not s:
1793 break
1819 break
1794 if limit:
1820 if limit:
1795 limit -= len(s)
1821 limit -= len(s)
1796 yield s
1822 yield s
1797
1823
1798 def makedate(timestamp=None):
1824 def makedate(timestamp=None):
1799 '''Return a unix timestamp (or the current time) as a (unixtime,
1825 '''Return a unix timestamp (or the current time) as a (unixtime,
1800 offset) tuple based off the local timezone.'''
1826 offset) tuple based off the local timezone.'''
1801 if timestamp is None:
1827 if timestamp is None:
1802 timestamp = time.time()
1828 timestamp = time.time()
1803 if timestamp < 0:
1829 if timestamp < 0:
1804 hint = _("check your clock")
1830 hint = _("check your clock")
1805 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1831 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1806 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1832 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1807 datetime.datetime.fromtimestamp(timestamp))
1833 datetime.datetime.fromtimestamp(timestamp))
1808 tz = delta.days * 86400 + delta.seconds
1834 tz = delta.days * 86400 + delta.seconds
1809 return timestamp, tz
1835 return timestamp, tz
1810
1836
1811 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1837 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1812 """represent a (unixtime, offset) tuple as a localized time.
1838 """represent a (unixtime, offset) tuple as a localized time.
1813 unixtime is seconds since the epoch, and offset is the time zone's
1839 unixtime is seconds since the epoch, and offset is the time zone's
1814 number of seconds away from UTC.
1840 number of seconds away from UTC.
1815
1841
1816 >>> datestr((0, 0))
1842 >>> datestr((0, 0))
1817 'Thu Jan 01 00:00:00 1970 +0000'
1843 'Thu Jan 01 00:00:00 1970 +0000'
1818 >>> datestr((42, 0))
1844 >>> datestr((42, 0))
1819 'Thu Jan 01 00:00:42 1970 +0000'
1845 'Thu Jan 01 00:00:42 1970 +0000'
1820 >>> datestr((-42, 0))
1846 >>> datestr((-42, 0))
1821 'Wed Dec 31 23:59:18 1969 +0000'
1847 'Wed Dec 31 23:59:18 1969 +0000'
1822 >>> datestr((0x7fffffff, 0))
1848 >>> datestr((0x7fffffff, 0))
1823 'Tue Jan 19 03:14:07 2038 +0000'
1849 'Tue Jan 19 03:14:07 2038 +0000'
1824 >>> datestr((-0x80000000, 0))
1850 >>> datestr((-0x80000000, 0))
1825 'Fri Dec 13 20:45:52 1901 +0000'
1851 'Fri Dec 13 20:45:52 1901 +0000'
1826 """
1852 """
1827 t, tz = date or makedate()
1853 t, tz = date or makedate()
1828 if "%1" in format or "%2" in format or "%z" in format:
1854 if "%1" in format or "%2" in format or "%z" in format:
1829 sign = (tz > 0) and "-" or "+"
1855 sign = (tz > 0) and "-" or "+"
1830 minutes = abs(tz) // 60
1856 minutes = abs(tz) // 60
1831 q, r = divmod(minutes, 60)
1857 q, r = divmod(minutes, 60)
1832 format = format.replace("%z", "%1%2")
1858 format = format.replace("%z", "%1%2")
1833 format = format.replace("%1", "%c%02d" % (sign, q))
1859 format = format.replace("%1", "%c%02d" % (sign, q))
1834 format = format.replace("%2", "%02d" % r)
1860 format = format.replace("%2", "%02d" % r)
1835 d = t - tz
1861 d = t - tz
1836 if d > 0x7fffffff:
1862 if d > 0x7fffffff:
1837 d = 0x7fffffff
1863 d = 0x7fffffff
1838 elif d < -0x80000000:
1864 elif d < -0x80000000:
1839 d = -0x80000000
1865 d = -0x80000000
1840 # Never use time.gmtime() and datetime.datetime.fromtimestamp()
1866 # Never use time.gmtime() and datetime.datetime.fromtimestamp()
1841 # because they use the gmtime() system call which is buggy on Windows
1867 # because they use the gmtime() system call which is buggy on Windows
1842 # for negative values.
1868 # for negative values.
1843 t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
1869 t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
1844 s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
1870 s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
1845 return s
1871 return s
1846
1872
1847 def shortdate(date=None):
1873 def shortdate(date=None):
1848 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1874 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1849 return datestr(date, format='%Y-%m-%d')
1875 return datestr(date, format='%Y-%m-%d')
1850
1876
1851 def parsetimezone(s):
1877 def parsetimezone(s):
1852 """find a trailing timezone, if any, in string, and return a
1878 """find a trailing timezone, if any, in string, and return a
1853 (offset, remainder) pair"""
1879 (offset, remainder) pair"""
1854
1880
1855 if s.endswith("GMT") or s.endswith("UTC"):
1881 if s.endswith("GMT") or s.endswith("UTC"):
1856 return 0, s[:-3].rstrip()
1882 return 0, s[:-3].rstrip()
1857
1883
1858 # Unix-style timezones [+-]hhmm
1884 # Unix-style timezones [+-]hhmm
1859 if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
1885 if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
1860 sign = (s[-5] == "+") and 1 or -1
1886 sign = (s[-5] == "+") and 1 or -1
1861 hours = int(s[-4:-2])
1887 hours = int(s[-4:-2])
1862 minutes = int(s[-2:])
1888 minutes = int(s[-2:])
1863 return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
1889 return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()
1864
1890
1865 # ISO8601 trailing Z
1891 # ISO8601 trailing Z
1866 if s.endswith("Z") and s[-2:-1].isdigit():
1892 if s.endswith("Z") and s[-2:-1].isdigit():
1867 return 0, s[:-1]
1893 return 0, s[:-1]
1868
1894
1869 # ISO8601-style [+-]hh:mm
1895 # ISO8601-style [+-]hh:mm
1870 if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
1896 if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
1871 s[-5:-3].isdigit() and s[-2:].isdigit()):
1897 s[-5:-3].isdigit() and s[-2:].isdigit()):
1872 sign = (s[-6] == "+") and 1 or -1
1898 sign = (s[-6] == "+") and 1 or -1
1873 hours = int(s[-5:-3])
1899 hours = int(s[-5:-3])
1874 minutes = int(s[-2:])
1900 minutes = int(s[-2:])
1875 return -sign * (hours * 60 + minutes) * 60, s[:-6]
1901 return -sign * (hours * 60 + minutes) * 60, s[:-6]
1876
1902
1877 return None, s
1903 return None, s
1878
1904
1879 def strdate(string, format, defaults=None):
1905 def strdate(string, format, defaults=None):
1880 """parse a localized time string and return a (unixtime, offset) tuple.
1906 """parse a localized time string and return a (unixtime, offset) tuple.
1881 if the string cannot be parsed, ValueError is raised."""
1907 if the string cannot be parsed, ValueError is raised."""
1882 if defaults is None:
1908 if defaults is None:
1883 defaults = {}
1909 defaults = {}
1884
1910
1885 # NOTE: unixtime = localunixtime + offset
1911 # NOTE: unixtime = localunixtime + offset
1886 offset, date = parsetimezone(string)
1912 offset, date = parsetimezone(string)
1887
1913
1888 # add missing elements from defaults
1914 # add missing elements from defaults
1889 usenow = False # default to using biased defaults
1915 usenow = False # default to using biased defaults
1890 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1916 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1891 found = [True for p in part if ("%"+p) in format]
1917 found = [True for p in part if ("%"+p) in format]
1892 if not found:
1918 if not found:
1893 date += "@" + defaults[part][usenow]
1919 date += "@" + defaults[part][usenow]
1894 format += "@%" + part[0]
1920 format += "@%" + part[0]
1895 else:
1921 else:
1896 # We've found a specific time element, less specific time
1922 # We've found a specific time element, less specific time
1897 # elements are relative to today
1923 # elements are relative to today
1898 usenow = True
1924 usenow = True
1899
1925
1900 timetuple = time.strptime(date, format)
1926 timetuple = time.strptime(date, format)
1901 localunixtime = int(calendar.timegm(timetuple))
1927 localunixtime = int(calendar.timegm(timetuple))
1902 if offset is None:
1928 if offset is None:
1903 # local timezone
1929 # local timezone
1904 unixtime = int(time.mktime(timetuple))
1930 unixtime = int(time.mktime(timetuple))
1905 offset = unixtime - localunixtime
1931 offset = unixtime - localunixtime
1906 else:
1932 else:
1907 unixtime = localunixtime + offset
1933 unixtime = localunixtime + offset
1908 return unixtime, offset
1934 return unixtime, offset
1909
1935
1910 def parsedate(date, formats=None, bias=None):
1936 def parsedate(date, formats=None, bias=None):
1911 """parse a localized date/time and return a (unixtime, offset) tuple.
1937 """parse a localized date/time and return a (unixtime, offset) tuple.
1912
1938
1913 The date may be a "unixtime offset" string or in one of the specified
1939 The date may be a "unixtime offset" string or in one of the specified
1914 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1940 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1915
1941
1916 >>> parsedate(' today ') == parsedate(\
1942 >>> parsedate(' today ') == parsedate(\
1917 datetime.date.today().strftime('%b %d'))
1943 datetime.date.today().strftime('%b %d'))
1918 True
1944 True
1919 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1945 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1920 datetime.timedelta(days=1)\
1946 datetime.timedelta(days=1)\
1921 ).strftime('%b %d'))
1947 ).strftime('%b %d'))
1922 True
1948 True
1923 >>> now, tz = makedate()
1949 >>> now, tz = makedate()
1924 >>> strnow, strtz = parsedate('now')
1950 >>> strnow, strtz = parsedate('now')
1925 >>> (strnow - now) < 1
1951 >>> (strnow - now) < 1
1926 True
1952 True
1927 >>> tz == strtz
1953 >>> tz == strtz
1928 True
1954 True
1929 """
1955 """
1930 if bias is None:
1956 if bias is None:
1931 bias = {}
1957 bias = {}
1932 if not date:
1958 if not date:
1933 return 0, 0
1959 return 0, 0
1934 if isinstance(date, tuple) and len(date) == 2:
1960 if isinstance(date, tuple) and len(date) == 2:
1935 return date
1961 return date
1936 if not formats:
1962 if not formats:
1937 formats = defaultdateformats
1963 formats = defaultdateformats
1938 date = date.strip()
1964 date = date.strip()
1939
1965
1940 if date == 'now' or date == _('now'):
1966 if date == 'now' or date == _('now'):
1941 return makedate()
1967 return makedate()
1942 if date == 'today' or date == _('today'):
1968 if date == 'today' or date == _('today'):
1943 date = datetime.date.today().strftime('%b %d')
1969 date = datetime.date.today().strftime('%b %d')
1944 elif date == 'yesterday' or date == _('yesterday'):
1970 elif date == 'yesterday' or date == _('yesterday'):
1945 date = (datetime.date.today() -
1971 date = (datetime.date.today() -
1946 datetime.timedelta(days=1)).strftime('%b %d')
1972 datetime.timedelta(days=1)).strftime('%b %d')
1947
1973
1948 try:
1974 try:
1949 when, offset = map(int, date.split(' '))
1975 when, offset = map(int, date.split(' '))
1950 except ValueError:
1976 except ValueError:
1951 # fill out defaults
1977 # fill out defaults
1952 now = makedate()
1978 now = makedate()
1953 defaults = {}
1979 defaults = {}
1954 for part in ("d", "mb", "yY", "HI", "M", "S"):
1980 for part in ("d", "mb", "yY", "HI", "M", "S"):
1955 # this piece is for rounding the specific end of unknowns
1981 # this piece is for rounding the specific end of unknowns
1956 b = bias.get(part)
1982 b = bias.get(part)
1957 if b is None:
1983 if b is None:
1958 if part[0] in "HMS":
1984 if part[0] in "HMS":
1959 b = "00"
1985 b = "00"
1960 else:
1986 else:
1961 b = "0"
1987 b = "0"
1962
1988
1963 # this piece is for matching the generic end to today's date
1989 # this piece is for matching the generic end to today's date
1964 n = datestr(now, "%" + part[0])
1990 n = datestr(now, "%" + part[0])
1965
1991
1966 defaults[part] = (b, n)
1992 defaults[part] = (b, n)
1967
1993
1968 for format in formats:
1994 for format in formats:
1969 try:
1995 try:
1970 when, offset = strdate(date, format, defaults)
1996 when, offset = strdate(date, format, defaults)
1971 except (ValueError, OverflowError):
1997 except (ValueError, OverflowError):
1972 pass
1998 pass
1973 else:
1999 else:
1974 break
2000 break
1975 else:
2001 else:
1976 raise Abort(_('invalid date: %r') % date)
2002 raise Abort(_('invalid date: %r') % date)
1977 # validate explicit (probably user-specified) date and
2003 # validate explicit (probably user-specified) date and
1978 # time zone offset. values must fit in signed 32 bits for
2004 # time zone offset. values must fit in signed 32 bits for
1979 # current 32-bit linux runtimes. timezones go from UTC-12
2005 # current 32-bit linux runtimes. timezones go from UTC-12
1980 # to UTC+14
2006 # to UTC+14
1981 if when < -0x80000000 or when > 0x7fffffff:
2007 if when < -0x80000000 or when > 0x7fffffff:
1982 raise Abort(_('date exceeds 32 bits: %d') % when)
2008 raise Abort(_('date exceeds 32 bits: %d') % when)
1983 if offset < -50400 or offset > 43200:
2009 if offset < -50400 or offset > 43200:
1984 raise Abort(_('impossible time zone offset: %d') % offset)
2010 raise Abort(_('impossible time zone offset: %d') % offset)
1985 return when, offset
2011 return when, offset
1986
2012
1987 def matchdate(date):
2013 def matchdate(date):
1988 """Return a function that matches a given date match specifier
2014 """Return a function that matches a given date match specifier
1989
2015
1990 Formats include:
2016 Formats include:
1991
2017
1992 '{date}' match a given date to the accuracy provided
2018 '{date}' match a given date to the accuracy provided
1993
2019
1994 '<{date}' on or before a given date
2020 '<{date}' on or before a given date
1995
2021
1996 '>{date}' on or after a given date
2022 '>{date}' on or after a given date
1997
2023
1998 >>> p1 = parsedate("10:29:59")
2024 >>> p1 = parsedate("10:29:59")
1999 >>> p2 = parsedate("10:30:00")
2025 >>> p2 = parsedate("10:30:00")
2000 >>> p3 = parsedate("10:30:59")
2026 >>> p3 = parsedate("10:30:59")
2001 >>> p4 = parsedate("10:31:00")
2027 >>> p4 = parsedate("10:31:00")
2002 >>> p5 = parsedate("Sep 15 10:30:00 1999")
2028 >>> p5 = parsedate("Sep 15 10:30:00 1999")
2003 >>> f = matchdate("10:30")
2029 >>> f = matchdate("10:30")
2004 >>> f(p1[0])
2030 >>> f(p1[0])
2005 False
2031 False
2006 >>> f(p2[0])
2032 >>> f(p2[0])
2007 True
2033 True
2008 >>> f(p3[0])
2034 >>> f(p3[0])
2009 True
2035 True
2010 >>> f(p4[0])
2036 >>> f(p4[0])
2011 False
2037 False
2012 >>> f(p5[0])
2038 >>> f(p5[0])
2013 False
2039 False
2014 """
2040 """
2015
2041
2016 def lower(date):
2042 def lower(date):
2017 d = {'mb': "1", 'd': "1"}
2043 d = {'mb': "1", 'd': "1"}
2018 return parsedate(date, extendeddateformats, d)[0]
2044 return parsedate(date, extendeddateformats, d)[0]
2019
2045
2020 def upper(date):
2046 def upper(date):
2021 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
2047 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
2022 for days in ("31", "30", "29"):
2048 for days in ("31", "30", "29"):
2023 try:
2049 try:
2024 d["d"] = days
2050 d["d"] = days
2025 return parsedate(date, extendeddateformats, d)[0]
2051 return parsedate(date, extendeddateformats, d)[0]
2026 except Abort:
2052 except Abort:
2027 pass
2053 pass
2028 d["d"] = "28"
2054 d["d"] = "28"
2029 return parsedate(date, extendeddateformats, d)[0]
2055 return parsedate(date, extendeddateformats, d)[0]
2030
2056
2031 date = date.strip()
2057 date = date.strip()
2032
2058
2033 if not date:
2059 if not date:
2034 raise Abort(_("dates cannot consist entirely of whitespace"))
2060 raise Abort(_("dates cannot consist entirely of whitespace"))
2035 elif date[0] == "<":
2061 elif date[0] == "<":
2036 if not date[1:]:
2062 if not date[1:]:
2037 raise Abort(_("invalid day spec, use '<DATE'"))
2063 raise Abort(_("invalid day spec, use '<DATE'"))
2038 when = upper(date[1:])
2064 when = upper(date[1:])
2039 return lambda x: x <= when
2065 return lambda x: x <= when
2040 elif date[0] == ">":
2066 elif date[0] == ">":
2041 if not date[1:]:
2067 if not date[1:]:
2042 raise Abort(_("invalid day spec, use '>DATE'"))
2068 raise Abort(_("invalid day spec, use '>DATE'"))
2043 when = lower(date[1:])
2069 when = lower(date[1:])
2044 return lambda x: x >= when
2070 return lambda x: x >= when
2045 elif date[0] == "-":
2071 elif date[0] == "-":
2046 try:
2072 try:
2047 days = int(date[1:])
2073 days = int(date[1:])
2048 except ValueError:
2074 except ValueError:
2049 raise Abort(_("invalid day spec: %s") % date[1:])
2075 raise Abort(_("invalid day spec: %s") % date[1:])
2050 if days < 0:
2076 if days < 0:
2051 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
2077 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
2052 % date[1:])
2078 % date[1:])
2053 when = makedate()[0] - days * 3600 * 24
2079 when = makedate()[0] - days * 3600 * 24
2054 return lambda x: x >= when
2080 return lambda x: x >= when
2055 elif " to " in date:
2081 elif " to " in date:
2056 a, b = date.split(" to ")
2082 a, b = date.split(" to ")
2057 start, stop = lower(a), upper(b)
2083 start, stop = lower(a), upper(b)
2058 return lambda x: x >= start and x <= stop
2084 return lambda x: x >= start and x <= stop
2059 else:
2085 else:
2060 start, stop = lower(date), upper(date)
2086 start, stop = lower(date), upper(date)
2061 return lambda x: x >= start and x <= stop
2087 return lambda x: x >= start and x <= stop
2062
2088
2063 def stringmatcher(pattern, casesensitive=True):
2089 def stringmatcher(pattern, casesensitive=True):
2064 """
2090 """
2065 accepts a string, possibly starting with 're:' or 'literal:' prefix.
2091 accepts a string, possibly starting with 're:' or 'literal:' prefix.
2066 returns the matcher name, pattern, and matcher function.
2092 returns the matcher name, pattern, and matcher function.
2067 missing or unknown prefixes are treated as literal matches.
2093 missing or unknown prefixes are treated as literal matches.
2068
2094
2069 helper for tests:
2095 helper for tests:
2070 >>> def test(pattern, *tests):
2096 >>> def test(pattern, *tests):
2071 ... kind, pattern, matcher = stringmatcher(pattern)
2097 ... kind, pattern, matcher = stringmatcher(pattern)
2072 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2098 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2073 >>> def itest(pattern, *tests):
2099 >>> def itest(pattern, *tests):
2074 ... kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
2100 ... kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
2075 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2101 ... return (kind, pattern, [bool(matcher(t)) for t in tests])
2076
2102
2077 exact matching (no prefix):
2103 exact matching (no prefix):
2078 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
2104 >>> test('abcdefg', 'abc', 'def', 'abcdefg')
2079 ('literal', 'abcdefg', [False, False, True])
2105 ('literal', 'abcdefg', [False, False, True])
2080
2106
2081 regex matching ('re:' prefix)
2107 regex matching ('re:' prefix)
2082 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
2108 >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
2083 ('re', 'a.+b', [False, False, True])
2109 ('re', 'a.+b', [False, False, True])
2084
2110
2085 force exact matches ('literal:' prefix)
2111 force exact matches ('literal:' prefix)
2086 >>> test('literal:re:foobar', 'foobar', 're:foobar')
2112 >>> test('literal:re:foobar', 'foobar', 're:foobar')
2087 ('literal', 're:foobar', [False, True])
2113 ('literal', 're:foobar', [False, True])
2088
2114
2089 unknown prefixes are ignored and treated as literals
2115 unknown prefixes are ignored and treated as literals
2090 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
2116 >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
2091 ('literal', 'foo:bar', [False, False, True])
2117 ('literal', 'foo:bar', [False, False, True])
2092
2118
2093 case insensitive regex matches
2119 case insensitive regex matches
2094 >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
2120 >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
2095 ('re', 'A.+b', [False, False, True])
2121 ('re', 'A.+b', [False, False, True])
2096
2122
2097 case insensitive literal matches
2123 case insensitive literal matches
2098 >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
2124 >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
2099 ('literal', 'ABCDEFG', [False, False, True])
2125 ('literal', 'ABCDEFG', [False, False, True])
2100 """
2126 """
2101 if pattern.startswith('re:'):
2127 if pattern.startswith('re:'):
2102 pattern = pattern[3:]
2128 pattern = pattern[3:]
2103 try:
2129 try:
2104 flags = 0
2130 flags = 0
2105 if not casesensitive:
2131 if not casesensitive:
2106 flags = remod.I
2132 flags = remod.I
2107 regex = remod.compile(pattern, flags)
2133 regex = remod.compile(pattern, flags)
2108 except remod.error as e:
2134 except remod.error as e:
2109 raise error.ParseError(_('invalid regular expression: %s')
2135 raise error.ParseError(_('invalid regular expression: %s')
2110 % e)
2136 % e)
2111 return 're', pattern, regex.search
2137 return 're', pattern, regex.search
2112 elif pattern.startswith('literal:'):
2138 elif pattern.startswith('literal:'):
2113 pattern = pattern[8:]
2139 pattern = pattern[8:]
2114
2140
2115 match = pattern.__eq__
2141 match = pattern.__eq__
2116
2142
2117 if not casesensitive:
2143 if not casesensitive:
2118 ipat = encoding.lower(pattern)
2144 ipat = encoding.lower(pattern)
2119 match = lambda s: ipat == encoding.lower(s)
2145 match = lambda s: ipat == encoding.lower(s)
2120 return 'literal', pattern, match
2146 return 'literal', pattern, match
2121
2147
2122 def shortuser(user):
2148 def shortuser(user):
2123 """Return a short representation of a user name or email address."""
2149 """Return a short representation of a user name or email address."""
2124 f = user.find('@')
2150 f = user.find('@')
2125 if f >= 0:
2151 if f >= 0:
2126 user = user[:f]
2152 user = user[:f]
2127 f = user.find('<')
2153 f = user.find('<')
2128 if f >= 0:
2154 if f >= 0:
2129 user = user[f + 1:]
2155 user = user[f + 1:]
2130 f = user.find(' ')
2156 f = user.find(' ')
2131 if f >= 0:
2157 if f >= 0:
2132 user = user[:f]
2158 user = user[:f]
2133 f = user.find('.')
2159 f = user.find('.')
2134 if f >= 0:
2160 if f >= 0:
2135 user = user[:f]
2161 user = user[:f]
2136 return user
2162 return user
2137
2163
2138 def emailuser(user):
2164 def emailuser(user):
2139 """Return the user portion of an email address."""
2165 """Return the user portion of an email address."""
2140 f = user.find('@')
2166 f = user.find('@')
2141 if f >= 0:
2167 if f >= 0:
2142 user = user[:f]
2168 user = user[:f]
2143 f = user.find('<')
2169 f = user.find('<')
2144 if f >= 0:
2170 if f >= 0:
2145 user = user[f + 1:]
2171 user = user[f + 1:]
2146 return user
2172 return user
2147
2173
2148 def email(author):
2174 def email(author):
2149 '''get email of author.'''
2175 '''get email of author.'''
2150 r = author.find('>')
2176 r = author.find('>')
2151 if r == -1:
2177 if r == -1:
2152 r = None
2178 r = None
2153 return author[author.find('<') + 1:r]
2179 return author[author.find('<') + 1:r]
2154
2180
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # encoding.trim is display-column aware (handles wide/East-Asian
    # characters), so the limit is terminal columns, not byte length
    return encoding.trim(text, maxlength, ellipsis='...')
2158
2184
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def go(count):
        # unittable entries are (multiplier, divisor, format), ordered
        # from largest to smallest unit; the first threshold met wins
        for multiplier, divisor, fmt in unittable:
            if abs(count) >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # below every threshold: fall back to the smallest (last) unit
        return unittable[-1][2] % count

    return go
2169
2195
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    # reject inverted ranges and non-positive starting lines up front
    if fromline > toline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    # convert the inclusive 1-based pair into a half-open 0-based range
    return fromline - 1, toline
2190
2216
# Render a byte count as a human-readable string. Thresholds run from
# largest to smallest so the first matching (multiplier, divisor, format)
# entry wins; counts below 100 KB fall through to plain bytes.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2203
2229
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error.
_eolre = remod.compile(br'\r*\n')

def tolf(s):
    """Normalize line endings in s (bytes) to LF."""
    # the replacement must be bytes to match the bytes pattern above
    # (equivalent on Python 2, required on Python 3)
    return _eolre.sub(b'\n', s)

def tocrlf(s):
    """Normalize line endings in s (bytes) to CRLF."""
    return _eolre.sub(b'\r\n', s)
2214
2240
# Choose EOL-conversion helpers for the host platform: on Windows
# (os.linesep == '\r\n') native data uses CRLF, so convert both ways;
# elsewhere native data already uses LF and both helpers are no-ops.
if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2221
2247
def escapestr(s):
    """Return s (bytes) with non-printable characters backslash-escaped."""
    # codecs.escape_encode is the C helper behind s.encode('string_escape');
    # calling it directly also works on Python 3, where the codec name
    # is gone. It returns an (encoded, length) pair.
    encoded, _consumed = codecs.escape_encode(s)
    return encoded
2226
2252
2227 def unescapestr(s):
2253 def unescapestr(s):
2228 return codecs.escape_decode(s)[0]
2254 return codecs.escape_decode(s)[0]
2229
2255
def uirepr(s):
    """repr() variant for user display."""
    # repr() doubles every backslash, which makes Windows paths hard to
    # read; collapse the pairs back to single separators
    return '\\'.join(repr(s).split('\\\\'))
2233
2259
# delay import of textwrap
def MBTextWrapper(**kwargs):
    # Factory for a display-column-aware TextWrapper. The class is
    # defined lazily so textwrap is only paid for on first use; the
    # global name is then rebound to the class itself (see bottom).
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # Split ucstr at the point where its cumulative display width
            # first exceeds space_left; returns (head, remainder).
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            # Called when the next chunk is wider than a whole line.
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                # cannot break: put the oversized chunk on its own line
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # Rebind the module-level name so subsequent calls construct tw
    # directly without re-running this factory body.
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2337
2363
def wrap(line, width, initindent='', hangindent=''):
    """Wrap line (a byte string) to width display columns.

    initindent prefixes the first output line, hangindent all later ones.
    The result is re-encoded with the local encoding.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # decode everything once with the configured charset/error mode so
    # wrapping operates on unicode, then re-encode the filled text
    uenc = pycompat.sysstr(encoding.encoding)
    umode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(uenc, umode)
    initindent = initindent.decode(uenc, umode)
    hangindent = hangindent.decode(uenc, umode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(uenc)
2353
2379
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #            | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    # yield every complete line; carry the partial tail
                    # over into the next read
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            line = l
                if not buf:
                    # EOF: os.read returned the empty string
                    break
            if line:
                # trailing data without a final newline
                yield line

    def iterfile(fp):
        # Only real on-disk (S_ISREG) files take the fast unsafe path;
        # pipes, sockets and ttys go through the EINTR-safe wrapper.
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        return fp
2425
2451
def iterlines(iterator):
    """Flatten an iterable of text chunks into individual lines."""
    return (line
            for chunk in iterator
            for line in chunk.splitlines())
2430
2456
def expandpath(path):
    """Expand environment variables, then '~'/'~user', in path."""
    path = os.path.expandvars(path)
    return os.path.expanduser(path)
2433
2459
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2448
2474
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # record the (pid, status) of whatever child was reaped
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD does not exist on Windows; skip handler installation there
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # re-test condfn() after seeing the child die: the condition
            # may have become true just before termination was observed
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # always restore the previous SIGCHLD disposition
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2483
2509
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.

    The caller's mapping is never modified.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            # drop the leading regex-escape backslash
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # work on a copy: the synthetic prefix -> prefix entry used for
        # escaping must not leak into the caller's dict
        mapping = dict(mapping)
        mapping[prefix_char] = prefix_char
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    # x.group() includes the prefix character; strip it for the lookup
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2508
2534
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall back to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2525
2551
# recognized spellings for configuration booleans
_booleans = {
    '1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
    '0': False, 'no': False, 'false': False, 'off': False, 'never': False,
}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # lookup is case-insensitive; unknown spellings yield None
    return _booleans.get(s.lower())
2536
2562
2537 _hextochr = dict((a + b, chr(int(a + b, 16)))
2563 _hextochr = dict((a + b, chr(int(a + b, 16)))
2538 for a in string.hexdigits for b in string.hexdigits)
2564 for a in string.hexdigits for b in string.hexdigits)
2539
2565
2540 class url(object):
2566 class url(object):
2541 r"""Reliable URL parser.
2567 r"""Reliable URL parser.
2542
2568
2543 This parses URLs and provides attributes for the following
2569 This parses URLs and provides attributes for the following
2544 components:
2570 components:
2545
2571
2546 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2572 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2547
2573
2548 Missing components are set to None. The only exception is
2574 Missing components are set to None. The only exception is
2549 fragment, which is set to '' if present but empty.
2575 fragment, which is set to '' if present but empty.
2550
2576
2551 If parsefragment is False, fragment is included in query. If
2577 If parsefragment is False, fragment is included in query. If
2552 parsequery is False, query is included in path. If both are
2578 parsequery is False, query is included in path. If both are
2553 False, both fragment and query are included in path.
2579 False, both fragment and query are included in path.
2554
2580
2555 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2581 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2556
2582
2557 Note that for backward compatibility reasons, bundle URLs do not
2583 Note that for backward compatibility reasons, bundle URLs do not
2558 take host names. That means 'bundle://../' has a path of '../'.
2584 take host names. That means 'bundle://../' has a path of '../'.
2559
2585
2560 Examples:
2586 Examples:
2561
2587
2562 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2588 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2563 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2589 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2564 >>> url('ssh://[::1]:2200//home/joe/repo')
2590 >>> url('ssh://[::1]:2200//home/joe/repo')
2565 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2591 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2566 >>> url('file:///home/joe/repo')
2592 >>> url('file:///home/joe/repo')
2567 <url scheme: 'file', path: '/home/joe/repo'>
2593 <url scheme: 'file', path: '/home/joe/repo'>
2568 >>> url('file:///c:/temp/foo/')
2594 >>> url('file:///c:/temp/foo/')
2569 <url scheme: 'file', path: 'c:/temp/foo/'>
2595 <url scheme: 'file', path: 'c:/temp/foo/'>
2570 >>> url('bundle:foo')
2596 >>> url('bundle:foo')
2571 <url scheme: 'bundle', path: 'foo'>
2597 <url scheme: 'bundle', path: 'foo'>
2572 >>> url('bundle://../foo')
2598 >>> url('bundle://../foo')
2573 <url scheme: 'bundle', path: '../foo'>
2599 <url scheme: 'bundle', path: '../foo'>
2574 >>> url(r'c:\foo\bar')
2600 >>> url(r'c:\foo\bar')
2575 <url path: 'c:\\foo\\bar'>
2601 <url path: 'c:\\foo\\bar'>
2576 >>> url(r'\\blah\blah\blah')
2602 >>> url(r'\\blah\blah\blah')
2577 <url path: '\\\\blah\\blah\\blah'>
2603 <url path: '\\\\blah\\blah\\blah'>
2578 >>> url(r'\\blah\blah\blah#baz')
2604 >>> url(r'\\blah\blah\blah#baz')
2579 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2605 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2580 >>> url(r'file:///C:\users\me')
2606 >>> url(r'file:///C:\users\me')
2581 <url scheme: 'file', path: 'C:\\users\\me'>
2607 <url scheme: 'file', path: 'C:\\users\\me'>
2582
2608
2583 Authentication credentials:
2609 Authentication credentials:
2584
2610
2585 >>> url('ssh://joe:xyz@x/repo')
2611 >>> url('ssh://joe:xyz@x/repo')
2586 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2612 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2587 >>> url('ssh://joe@x/repo')
2613 >>> url('ssh://joe@x/repo')
2588 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2614 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2589
2615
2590 Query strings and fragments:
2616 Query strings and fragments:
2591
2617
2592 >>> url('http://host/a?b#c')
2618 >>> url('http://host/a?b#c')
2593 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2619 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2594 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2620 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2595 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2621 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2596
2622
2597 Empty path:
2623 Empty path:
2598
2624
2599 >>> url('')
2625 >>> url('')
2600 <url path: ''>
2626 <url path: ''>
2601 >>> url('#a')
2627 >>> url('#a')
2602 <url path: '', fragment: 'a'>
2628 <url path: '', fragment: 'a'>
2603 >>> url('http://host/')
2629 >>> url('http://host/')
2604 <url scheme: 'http', host: 'host', path: ''>
2630 <url scheme: 'http', host: 'host', path: ''>
2605 >>> url('http://host/#a')
2631 >>> url('http://host/#a')
2606 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2632 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2607
2633
2608 Only scheme:
2634 Only scheme:
2609
2635
2610 >>> url('http:')
2636 >>> url('http:')
2611 <url scheme: 'http'>
2637 <url scheme: 'http'>
2612 """
2638 """
2613
2639
2614 _safechars = "!~*'()+"
2640 _safechars = "!~*'()+"
2615 _safepchars = "/!~*'()+:\\"
2641 _safepchars = "/!~*'()+:\\"
2616 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2642 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2617
2643
    def __init__(self, path, parsequery=True, parsefragment=True):
        """Parse ``path`` into URL components stored as attributes.

        Sets scheme, user, passwd, host, port, path, query and fragment
        (each may be None).  ``parsequery``/``parsefragment`` disable
        splitting on '?' and '#' respectively.  Raises Abort for
        file:// URLs whose host is not localhost.
        """
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        # assume a plain local path until a scheme is recognized below
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith('\\\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        if self.host and '@' in self.host:
            # rsplit so a '@' inside the password is kept with the user part
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, urlreq.unquote(v))
2709
2735
2710 def __repr__(self):
2736 def __repr__(self):
2711 attrs = []
2737 attrs = []
2712 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2738 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2713 'query', 'fragment'):
2739 'query', 'fragment'):
2714 v = getattr(self, a)
2740 v = getattr(self, a)
2715 if v is not None:
2741 if v is not None:
2716 attrs.append('%s: %r' % (a, v))
2742 attrs.append('%s: %r' % (a, v))
2717 return '<url %s>' % ', '.join(attrs)
2743 return '<url %s>' % ', '.join(attrs)
2718
2744
    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        # __bytes__ does the actual assembly; convert to the platform's
        # native str type for display
        return encoding.strfromlocal(self.__bytes__())
2754
2780
    def __bytes__(self):
        """Reassemble the parsed components into a URL bytestring.

        Inverse of __init__ modulo quoting: user/passwd/host/port/path/
        fragment were stored unquoted and are re-quoted here; the query
        string was kept escaped and is emitted verbatim.
        """
        if self._localpath:
            # plain local path (optionally a bundle: path with fragment)
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            # authority-less form such as file:///..., keep the '//' and add
            # a '/' before a Windows drive letter (file:///c:/...)
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals must not be percent-quoted
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s
2798
2824
2799 def authinfo(self):
2825 def authinfo(self):
2800 user, passwd = self.user, self.passwd
2826 user, passwd = self.user, self.passwd
2801 try:
2827 try:
2802 self.user, self.passwd = None, None
2828 self.user, self.passwd = None, None
2803 s = bytes(self)
2829 s = bytes(self)
2804 finally:
2830 finally:
2805 self.user, self.passwd = user, passwd
2831 self.user, self.passwd = user, passwd
2806 if not self.user:
2832 if not self.user:
2807 return (s, None)
2833 return (s, None)
2808 # authinfo[1] is passed to urllib2 password manager, and its
2834 # authinfo[1] is passed to urllib2 password manager, and its
2809 # URIs must not contain credentials. The host is passed in the
2835 # URIs must not contain credentials. The host is passed in the
2810 # URIs list because Python < 2.4.3 uses only that to search for
2836 # URIs list because Python < 2.4.3 uses only that to search for
2811 # a password.
2837 # a password.
2812 return (s, (None, (s, self.host),
2838 return (s, (None, (s, self.host),
2813 self.user, self.passwd or ''))
2839 self.user, self.passwd or ''))
2814
2840
2815 def isabs(self):
2841 def isabs(self):
2816 if self.scheme and self.scheme != 'file':
2842 if self.scheme and self.scheme != 'file':
2817 return True # remote URL
2843 return True # remote URL
2818 if hasdriveletter(self.path):
2844 if hasdriveletter(self.path):
2819 return True # absolute for our purposes - can't be joined()
2845 return True # absolute for our purposes - can't be joined()
2820 if self.path.startswith(r'\\'):
2846 if self.path.startswith(r'\\'):
2821 return True # Windows UNC path
2847 return True # Windows UNC path
2822 if self.path.startswith('/'):
2848 if self.path.startswith('/'):
2823 return True # POSIX-style
2849 return True # POSIX-style
2824 return False
2850 return False
2825
2851
    def localpath(self):
        """Return a filesystem path for file:/bundle: URLs.

        Any other scheme gets the original, unparsed path string back.
        """
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                # a parsed host means the path lost its leading '/'
                path = '/' + path
            return path
        return self._origpath
2838
2864
2839 def islocal(self):
2865 def islocal(self):
2840 '''whether localpath will return something that posixfile can open'''
2866 '''whether localpath will return something that posixfile can open'''
2841 return (not self.scheme or self.scheme == 'file'
2867 return (not self.scheme or self.scheme == 'file'
2842 or self.scheme == 'bundle')
2868 or self.scheme == 'bundle')
2843
2869
def hasscheme(path):
    """Return True if path carries an explicit URL scheme."""
    scheme = url(path).scheme
    return bool(scheme)
2846
2872
def hasdriveletter(path):
    """Return a truthy value if path starts with a Windows drive letter.

    Slicing keeps the checks safe on empty or one-character input.
    """
    return path and path[0:1].isalpha() and path[1:2] == ':'
2849
2875
def urllocalpath(path):
    """Return the local filesystem path for ``path``.

    '?' and '#' are treated as literal path characters, not as query or
    fragment separators.
    """
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
2852
2878
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # mask rather than drop, so the URL shape stays recognizable
        parsed.passwd = '***'
    return bytes(parsed)
2859
2885
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # return bytes for consistency with hidepassword(); str() would go
    # through encoding.strfromlocal and yield a different type on Python 3
    return bytes(u)
2865
2891
# render a duration (in seconds) as a human-readable string, picking the
# unit (s/ms/us/ns) and precision based on the magnitude of the value
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
2881
2907
# current indentation depth of @timed output; a one-element list so nested
# timed calls can mutate it in place
_timenesting = [0]
2883
2909
def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        start = timer()
        step = 2
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            # report even when func raises, dedenting first so nested
            # timed calls line up
            elapsed = timer() - start
            _timenesting[0] -= step
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(elapsed)))
    return wrapper
2908
2934
# size suffixes, tried in tuple order; the bare 'b' entry must stay last so
# that 'kb'/'mb'/'gb' match before it
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    t = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if t.endswith(suffix):
                return int(float(t[:-len(suffix)]) * multiplier)
        return int(t)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2930
2956
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, callable); kept unsorted, sorted at call time
        self._hooks = []

    def add(self, source, hook):
        """Register ``hook`` under the name ``source``."""
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort in place by source name so the call order is deterministic
        self._hooks.sort(key=lambda pair: pair[0])
        return [fn(*args) for _source, fn in self._hooks]
2948
2974
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries
    (depth 0 keeps them all).
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not be used in production code but very convenient while developing.
    '''
    # drop this function's own frame plus 'skip' caller frames
    frames = traceback.extract_stack()[:-skip - 1]
    entries = [(fileline % (fname, lineno), funcname)
               for fname, lineno, funcname, _text in frames][-depth:]
    if not entries:
        return
    width = max(len(entry[0]) for entry in entries)
    for fnln, funcname in entries:
        if line is None:
            yield (width, fnln, funcname)
        else:
            yield line % (width, fnln, funcname)
2971
2997
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' entries closest to the call, then show 'depth' entries.
    By default it will flush stdout first.
    It can be used everywhere and intentionally does not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    # flush the other stream first so interleaved output stays readable
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    # skip + 1 hides this helper's own frame from the trace
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
2986
3012
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> number of entries living beneath it
        self._dirs = {}
        addpath = self.addpath
        if safehasattr(map, 'iteritems') and skip is not None:
            # dirstate-style mapping: ignore entries whose state == skip
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if base in counts:
                # every ancestor of an already-known directory is counted
                # too, so bumping the first hit and stopping is enough
                counts[base] += 1
                return
            counts[base] = 1

    def delpath(self, path):
        counts = self._dirs
        for base in finddirs(path):
            if counts[base] > 1:
                # same invariant as addpath: decrement the first shared
                # ancestor and stop
                counts[base] -= 1
                return
            del counts[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3022
3048
# prefer the C implementation of dirs when the parsers module provides one
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3025
3051
def finddirs(path):
    """Yield the ancestor directories of path, deepest first.

    'a/b/c' yields 'a/b' then 'a'; a path without '/' yields nothing.
    """
    remainder = path
    while True:
        remainder, slash, _basename = remainder.rpartition('/')
        if not slash:
            return
        yield remainder
3031
3057
class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        # entering twice is a programming error; dropping the attribute
        # makes a second call fail loudly
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    # this manager handled the exception; clear it for the
                    # remaining exit functions
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                # an exit function failed: remember the newest failure and
                # keep unwinding the rest with the new exception info.
                # (a redundant duplicate assignment of ``pending`` was
                # removed here; the combined assignment below is enough)
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
3091
3117
3092 # compression code
3118 # compression code
3093
3119
# role identifiers used when querying wire-protocol compression support
SERVERROLE = 'server'
CLIENTROLE = 'client'
3096
3122
# describes a compression engine's wire-protocol support: the identifier it
# advertises on the wire plus its priority when acting as server or client
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3100
3126
3101 class compressormanager(object):
3127 class compressormanager(object):
3102 """Holds registrations of various compression engines.
3128 """Holds registrations of various compression engines.
3103
3129
3104 This class essentially abstracts the differences between compression
3130 This class essentially abstracts the differences between compression
3105 engines to allow new compression formats to be added easily, possibly from
3131 engines to allow new compression formats to be added easily, possibly from
3106 extensions.
3132 extensions.
3107
3133
3108 Compressors are registered against the global instance by calling its
3134 Compressors are registered against the global instance by calling its
3109 ``register()`` method.
3135 ``register()`` method.
3110 """
3136 """
    def __init__(self):
        # Engine name to compressionengine instance.
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}
3121
3147
    def __getitem__(self, key):
        # look up a registered engine by name; raises KeyError if unknown
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        # iterate over the names of all registered engines
        return iter(self._engines.keys())
3130
3156
    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.

        Raises error.Abort if the engine's name, bundle name/type, wire
        protocol identifier or revlog header collides with an engine that
        was registered earlier.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # No external facing name declared.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        # only record the engine once every collision check has passed
        self._engines[name] = engine
3181
3207
3182 @property
3208 @property
3183 def supportedbundlenames(self):
3209 def supportedbundlenames(self):
3184 return set(self._bundlenames.keys())
3210 return set(self._bundlenames.keys())
3185
3211
3186 @property
3212 @property
3187 def supportedbundletypes(self):
3213 def supportedbundletypes(self):
3188 return set(self._bundletypes.keys())
3214 return set(self._bundletypes.keys())
3189
3215
3190 def forbundlename(self, bundlename):
3216 def forbundlename(self, bundlename):
3191 """Obtain a compression engine registered to a bundle name.
3217 """Obtain a compression engine registered to a bundle name.
3192
3218
3193 Will raise KeyError if the bundle type isn't registered.
3219 Will raise KeyError if the bundle type isn't registered.
3194
3220
3195 Will abort if the engine is known but not available.
3221 Will abort if the engine is known but not available.
3196 """
3222 """
3197 engine = self._engines[self._bundlenames[bundlename]]
3223 engine = self._engines[self._bundlenames[bundlename]]
3198 if not engine.available():
3224 if not engine.available():
3199 raise error.Abort(_('compression engine %s could not be loaded') %
3225 raise error.Abort(_('compression engine %s could not be loaded') %
3200 engine.name())
3226 engine.name())
3201 return engine
3227 return engine
3202
3228
3203 def forbundletype(self, bundletype):
3229 def forbundletype(self, bundletype):
3204 """Obtain a compression engine registered to a bundle type.
3230 """Obtain a compression engine registered to a bundle type.
3205
3231
3206 Will raise KeyError if the bundle type isn't registered.
3232 Will raise KeyError if the bundle type isn't registered.
3207
3233
3208 Will abort if the engine is known but not available.
3234 Will abort if the engine is known but not available.
3209 """
3235 """
3210 engine = self._engines[self._bundletypes[bundletype]]
3236 engine = self._engines[self._bundletypes[bundletype]]
3211 if not engine.available():
3237 if not engine.available():
3212 raise error.Abort(_('compression engine %s could not be loaded') %
3238 raise error.Abort(_('compression engine %s could not be loaded') %
3213 engine.name())
3239 engine.name())
3214 return engine
3240 return engine
3215
3241
3216 def supportedwireengines(self, role, onlyavailable=True):
3242 def supportedwireengines(self, role, onlyavailable=True):
3217 """Obtain compression engines that support the wire protocol.
3243 """Obtain compression engines that support the wire protocol.
3218
3244
3219 Returns a list of engines in prioritized order, most desired first.
3245 Returns a list of engines in prioritized order, most desired first.
3220
3246
3221 If ``onlyavailable`` is set, filter out engines that can't be
3247 If ``onlyavailable`` is set, filter out engines that can't be
3222 loaded.
3248 loaded.
3223 """
3249 """
3224 assert role in (SERVERROLE, CLIENTROLE)
3250 assert role in (SERVERROLE, CLIENTROLE)
3225
3251
3226 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3252 attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'
3227
3253
3228 engines = [self._engines[e] for e in self._wiretypes.values()]
3254 engines = [self._engines[e] for e in self._wiretypes.values()]
3229 if onlyavailable:
3255 if onlyavailable:
3230 engines = [e for e in engines if e.available()]
3256 engines = [e for e in engines if e.available()]
3231
3257
3232 def getkey(e):
3258 def getkey(e):
3233 # Sort first by priority, highest first. In case of tie, sort
3259 # Sort first by priority, highest first. In case of tie, sort
3234 # alphabetically. This is arbitrary, but ensures output is
3260 # alphabetically. This is arbitrary, but ensures output is
3235 # stable.
3261 # stable.
3236 w = e.wireprotosupport()
3262 w = e.wireprotosupport()
3237 return -1 * getattr(w, attr), w.name
3263 return -1 * getattr(w, attr), w.name
3238
3264
3239 return list(sorted(engines, key=getkey))
3265 return list(sorted(engines, key=getkey))
3240
3266
3241 def forwiretype(self, wiretype):
3267 def forwiretype(self, wiretype):
3242 engine = self._engines[self._wiretypes[wiretype]]
3268 engine = self._engines[self._wiretypes[wiretype]]
3243 if not engine.available():
3269 if not engine.available():
3244 raise error.Abort(_('compression engine %s could not be loaded') %
3270 raise error.Abort(_('compression engine %s could not be loaded') %
3245 engine.name())
3271 engine.name())
3246 return engine
3272 return engine
3247
3273
3248 def forrevlogheader(self, header):
3274 def forrevlogheader(self, header):
3249 """Obtain a compression engine registered to a revlog header.
3275 """Obtain a compression engine registered to a revlog header.
3250
3276
3251 Will raise KeyError if the revlog header value isn't registered.
3277 Will raise KeyError if the revlog header value isn't registered.
3252 """
3278 """
3253 return self._engines[self._revlogheaders[header]]
3279 return self._engines[self._revlogheaders[header]]
3254
3280
# Module-level singleton through which all compression engines are
# registered and looked up.
compengines = compressormanager()
3256
3282
class compressionengine(object):
    """Abstract interface that concrete compression engines implement.

    The manager (``compressormanager``) only talks to engines through the
    methods defined here. Optional capabilities (bundles, wire protocol,
    revlogs) are declared by overriding the corresponding describe method
    to return something other than ``None``.
    """
    def name(self):
        """Return the symbolic name of this compression engine.

        The name doubles as the key the engine is registered under in the
        manager.

        Subclasses must implement this.
        """
        raise NotImplementedError()

    def available(self):
        """Report whether this engine can actually be used.

        Exists so that optional engines — for instance ones backed by C
        extensions that may be absent from an installation — can signal
        their availability. The default says the engine is always usable.
        """
        return True

    def bundletype(self):
        """Describe the bundle identifiers for this engine.

        Returns ``None`` when the engine cannot be used for bundles.

        Otherwise returns a 2-tuple of strings: the user-facing "bundle
        spec" compression name and the internal identifier denoting this
        compression inside bundles. Setting the first element to ``None``
        hides the name from external usage.

        Engines supporting bundle compression must also implement
        ``compressstream`` and ``decompressorreader``.

        Note: the docstring of an override is surfaced verbatim by the
        help system to describe the engine to users.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression on the wire protocol.

        Returns ``None`` when the engine cannot compress wire protocol
        payloads.

        Otherwise returns a ``compenginewireprotosupport`` carrying:

        * String format identifier
        * Integer priority for the server
        * Integer priority for the client

        The priorities order how server and client advertise format
        support — highest integer first, non-positive values are not
        advertised at all. The values only fix the default ordering; the
        relative order can be changed through config options.

        Engines supporting wire protocol compression must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        Engines usable for revlog compression return the bytes that
        identify chunks they compressed; engines that do not participate
        in revlog compression return ``None``.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        Receives an iterator (ideally a generator) of byte chunks to
        compress and returns an iterator (ideally a generator) of byte
        chunks of compressed output.

        ``opts`` optionally tunes how compression is performed; each
        engine interprets it in its own way.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        ``fh`` is an object with a ``read(size)`` method yielding
        compressed data. The return value is an object whose
        ``read(size)`` yields the uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can compress revlog entries.

        The returned object is reusable but not thread safe. It exposes:

        ``compress(data)``
            Compresses binary data, returning the compressed bytes or
            ``None`` when compression was not worthwhile (input too
            small, incompressible, ...). The output must start with a
            header matching ``revlogheader()`` so decompression can be
            routed back to this engine.

        ``decompress(data)``
            Decompresses data; only invoked when ``data`` begins with
            ``revlogheader()``. Returns the raw uncompressed data or
            raises ``RevlogError``.
        """
        raise NotImplementedError()
3373
3399
class _zlibengine(compressionengine):
    """Engine wrapping the stdlib ``zlib`` module (DEFLATE)."""

    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        return 'x'

    def compressstream(self, it, opts=None):
        opts = opts or {}

        compobj = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            out = compobj.compress(chunk)
            # compress() may buffer input and emit nothing; it is cheaper
            # to skip empty output here than to push empty chunks through
            # the generator.
            if out:
                yield out

        yield compobj.flush()

    def decompressorreader(self, fh):
        def gen():
            decomp = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                while chunk:
                    # Cap each decompressed piece to bound memory usage.
                    yield decomp.decompress(chunk, 2 ** 18)
                    chunk = decomp.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # The caller is responsible for filtering out empty input.
            assert insize > 0

            if insize < 44:
                # Too small to be worth compressing.
                return None

            if insize <= 1000000:
                compressed = zlib.compress(data)
                return compressed if len(compressed) < insize else None

            # zlib makes an internal copy of its input, doubling memory
            # usage on large payloads, so stream big inputs through the
            # incremental API in 1 MB slices instead.
            compobj = zlib.compressobj()
            pieces = []
            offset = 0
            while offset < insize:
                end = offset + 2 ** 20
                pieces.append(compobj.compress(data[offset:end]))
                offset = end
            pieces.append(compobj.flush())

            if sum(map(len, pieces)) < insize:
                return ''.join(pieces)
            return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3460
3486
class _bz2engine(compressionengine):
    """Engine wrapping the stdlib ``bz2`` module."""

    def name(self):
        return 'bz2'

    def bundletype(self):
        """An algorithm that produces smaller bundles than ``gzip``.

        All Mercurial clients should support this format.

        This engine will likely produce smaller bundles than ``gzip`` but
        will be significantly slower, both during compression and
        decompression.

        If available, the ``zstd`` engine can yield similar or better
        compression at much higher speeds.
        """
        return 'bzip2', 'BZ'

    def wireprotosupport(self):
        # A protocol name is declared, but advertisement is disabled by
        # default (zero priorities) because bzip2 is slow.
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        opts = opts or {}
        compobj = bz2.BZ2Compressor(opts.get('level', 9))
        for chunk in it:
            out = compobj.compress(chunk)
            if out:
                yield out

        yield compobj.flush()

    def decompressorreader(self, fh):
        def gen():
            decomp = bz2.BZ2Decompressor()
            for chunk in filechunkiter(fh):
                yield decomp.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_bz2engine())
3503
3529
class _truncatedbz2engine(compressionengine):
    """Decompression-only engine for bz2 streams missing their header."""

    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        return None, '_truncatedBZ'

    # compressstream is intentionally absent: producing this format is
    # hackily handled elsewhere.

    def decompressorreader(self, fh):
        def gen():
            decomp = bz2.BZ2Decompressor()
            # The input stream lacks the leading 'BZ' magic, so feed it
            # to the decompressor before the real data.
            decomp.decompress('BZ')
            for chunk in filechunkiter(fh):
                yield decomp.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_truncatedbz2engine())
3524
3550
class _noopengine(compressionengine):
    """Pass-through engine that performs no compression at all."""

    def name(self):
        return 'none'

    def bundletype(self):
        """No compression is performed.

        Use this compression engine to explicitly disable compression.
        """
        return 'none', 'UN'

    def wireprotosupport(self):
        # Clients always accept uncompressed payloads. Servers do not
        # advertise them (priority 0) because, unless the network is
        # fast, uncompressed payloads can easily saturate the pipe.
        return compewireprotosupport('none', 0, 10)

    # revlogheader is deliberately not implemented; uncompressed chunks
    # are special-cased inside the revlog class itself.

    def compressstream(self, it, opts=None):
        return it

    def decompressorreader(self, fh):
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            return None

    def revlogcompressor(self, opts=None):
        return self.nooprevlogcompressor()

compengines.register(_noopengine())
3559
3585
class _zstdengine(compressionengine):
    """Engine wrapping the optional bundled ``zstd`` module."""

    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # The zstd module is not present in every install, so defer the
        # import until first access instead of module load time.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            return None

    def available(self):
        return bool(self._module)

    def bundletype(self):
        """A modern compression algorithm that is fast and highly flexible.

        Only supported by Mercurial 4.1 and newer clients.

        With the default settings, zstd compression is both faster and yields
        better compression than ``gzip``. It also frequently yields better
        compression than ``bzip2`` while operating at much higher speeds.

        If this engine is available and backwards compatibility is not a
        concern, it is likely the best available engine.
        """
        return 'zstd', 'ZS'

    def wireprotosupport(self):
        return compewireprotosupport('zstd', 50, 50)

    def revlogheader(self):
        return '\x28'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression — a good balance between
        # speed and size.
        level = opts.get('level', 3)

        zstd = self._module
        compobj = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            out = compobj.compress(chunk)
            if out:
                yield out

        yield compobj.flush()

    def decompressorreader(self, fh):
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

    class zstdrevlogcompressor(object):
        def __init__(self, zstd, level=3):
            # Writing the content size costs a few extra output bytes but
            # makes decompression more optimal because a result buffer
            # can be pre-allocated.
            self._cctx = zstd.ZstdCompressor(level=level,
                                             write_content_size=True)
            self._dctx = zstd.ZstdDecompressor()
            self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
            self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE

        def compress(self, data):
            insize = len(data)
            # The caller is responsible for filtering out empty input.
            assert insize > 0

            if insize < 50:
                # Too small to be worth compressing.
                return None

            if insize <= 1000000:
                compressed = self._cctx.compress(data)
                return compressed if len(compressed) < insize else None

            # Stream large payloads through the compressor in
            # recommended-size slices to bound memory usage.
            compobj = self._cctx.compressobj()
            pieces = []
            offset = 0
            while offset < insize:
                end = offset + self._compinsize
                piece = compobj.compress(data[offset:end])
                if piece:
                    pieces.append(piece)
                offset = end
            pieces.append(compobj.flush())

            if sum(map(len, pieces)) < insize:
                return ''.join(pieces)
            return None

        def decompress(self, data):
            insize = len(data)

            try:
                # This was measured to be faster than the other streaming
                # decompression APIs.
                dobj = self._dctx.decompressobj()
                pieces = []
                offset = 0
                while offset < insize:
                    end = offset + self._decompinsize
                    piece = dobj.decompress(data[offset:end])
                    if piece:
                        pieces.append(piece)
                    offset = end
                # The frame should be exhausted at this point, so there
                # is no finish() API to call.

                return ''.join(pieces)
            except Exception as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        opts = opts or {}
        return self.zstdrevlogcompressor(self._module,
                                         level=opts.get('level', 3))

compengines.register(_zstdengine())
3688
3714
def bundlecompressiontopics():
    """Obtains a list of available bundle compressions for use in help."""
    # help.makeitemsdocs() wants a mapping from names to objects carrying
    # a .__doc__ attribute.
    items = {}

    # The docstring needs formatting, so park the result on a throwaway
    # object rather than mutating the engine's bound method.
    class docobject(object):
        pass

    for name in compengines:
        engine = compengines[name]

        if not engine.available():
            continue

        bt = engine.bundletype()
        # Skip engines that have no user-facing bundle name.
        if not bt or not bt[0]:
            continue

        doc = pycompat.sysstr('``%s``\n    %s') % (
            bt[0], engine.bundletype.__doc__)

        holder = docobject()
        holder.__doc__ = doc

        items[bt[0]] = holder

    return items
3718
3744
# Convenient shortcut for interactive debugging sessions.
dst = debugstacktrace
@@ -1,2664 +1,2665 b''
1 #!/usr/bin/env python
1 #!/usr/bin/env python
2 #
2 #
3 # run-tests.py - Run a set of tests on Mercurial
3 # run-tests.py - Run a set of tests on Mercurial
4 #
4 #
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 # Modifying this script is tricky because it has many modes:
10 # Modifying this script is tricky because it has many modes:
11 # - serial (default) vs parallel (-jN, N > 1)
11 # - serial (default) vs parallel (-jN, N > 1)
12 # - no coverage (default) vs coverage (-c, -C, -s)
12 # - no coverage (default) vs coverage (-c, -C, -s)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 # - tests are a mix of shell scripts and Python scripts
14 # - tests are a mix of shell scripts and Python scripts
15 #
15 #
16 # If you change this script, it is recommended that you ensure you
16 # If you change this script, it is recommended that you ensure you
17 # haven't broken it by running it in various modes with a representative
17 # haven't broken it by running it in various modes with a representative
18 # sample of test scripts. For example:
18 # sample of test scripts. For example:
19 #
19 #
20 # 1) serial, no coverage, temp install:
20 # 1) serial, no coverage, temp install:
21 # ./run-tests.py test-s*
21 # ./run-tests.py test-s*
22 # 2) serial, no coverage, local hg:
22 # 2) serial, no coverage, local hg:
23 # ./run-tests.py --local test-s*
23 # ./run-tests.py --local test-s*
24 # 3) serial, coverage, temp install:
24 # 3) serial, coverage, temp install:
25 # ./run-tests.py -c test-s*
25 # ./run-tests.py -c test-s*
26 # 4) serial, coverage, local hg:
26 # 4) serial, coverage, local hg:
27 # ./run-tests.py -c --local test-s* # unsupported
27 # ./run-tests.py -c --local test-s* # unsupported
28 # 5) parallel, no coverage, temp install:
28 # 5) parallel, no coverage, temp install:
29 # ./run-tests.py -j2 test-s*
29 # ./run-tests.py -j2 test-s*
30 # 6) parallel, no coverage, local hg:
30 # 6) parallel, no coverage, local hg:
31 # ./run-tests.py -j2 --local test-s*
31 # ./run-tests.py -j2 --local test-s*
32 # 7) parallel, coverage, temp install:
32 # 7) parallel, coverage, temp install:
33 # ./run-tests.py -j2 -c test-s* # currently broken
33 # ./run-tests.py -j2 -c test-s* # currently broken
34 # 8) parallel, coverage, local install:
34 # 8) parallel, coverage, local install:
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 # 9) parallel, custom tmp dir:
36 # 9) parallel, custom tmp dir:
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 # 10) parallel, pure, tests that call run-tests:
38 # 10) parallel, pure, tests that call run-tests:
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 #
40 #
41 # (You could use any subset of the tests: test-s* happens to match
41 # (You could use any subset of the tests: test-s* happens to match
42 # enough that it's worth doing parallel runs, few enough that it
42 # enough that it's worth doing parallel runs, few enough that it
43 # completes fairly quickly, includes both shell and Python scripts, and
43 # completes fairly quickly, includes both shell and Python scripts, and
44 # includes some scripts that run daemon processes.)
44 # includes some scripts that run daemon processes.)
45
45
46 from __future__ import absolute_import, print_function
46 from __future__ import absolute_import, print_function
47
47
48 import difflib
48 import difflib
49 import distutils.version as version
49 import distutils.version as version
50 import errno
50 import errno
51 import json
51 import json
52 import optparse
52 import optparse
53 import os
53 import os
54 import random
54 import random
55 import re
55 import re
56 import shutil
56 import shutil
57 import signal
57 import signal
58 import socket
58 import socket
59 import subprocess
59 import subprocess
60 import sys
60 import sys
61 try:
61 try:
62 import sysconfig
62 import sysconfig
63 except ImportError:
63 except ImportError:
64 # sysconfig doesn't exist in Python 2.6
64 # sysconfig doesn't exist in Python 2.6
65 sysconfig = None
65 sysconfig = None
66 import tempfile
66 import tempfile
67 import threading
67 import threading
68 import time
68 import time
69 import unittest
69 import unittest
70 import xml.dom.minidom as minidom
70 import xml.dom.minidom as minidom
71
71
72 try:
72 try:
73 import Queue as queue
73 import Queue as queue
74 except ImportError:
74 except ImportError:
75 import queue
75 import queue
76
76
# Opt-in strictness knob: with RTUNICODEPEDANTRY set, make the Python 2
# process default encoding "undefined" so any implicit bytes<->unicode
# coercion raises, flushing out latent encoding bugs in the test runner.
if os.environ.get('RTUNICODEPEDANTRY', False):
    try:
        # reload() and sys.setdefaultencoding only exist on Python 2;
        # on Python 3 the NameError is swallowed and this is a no-op.
        reload(sys)
        sys.setdefaultencoding("undefined")
    except NameError:
        pass
83
83
# os.environb (bytes view of the environment) only exists on Python 3;
# fall back to os.environ where it is missing.
osenvironb = getattr(os, 'environb', os.environ)
# Serializes process creation; Popen4() below acquires this around
# subprocess.Popen.
processlock = threading.Lock()
85 processlock = threading.Lock()
86
86
if sys.version_info > (3, 5, 0):
    PYTHON3 = True
    # we use xrange in one place, and we'd rather not use range
    xrange = range

    def _bytespath(p):
        """Encode a native-str path to the bytes form used internally."""
        return p.encode('utf-8')

    def _strpath(p):
        """Decode an internal bytes path back to a native str."""
        return p.decode('utf-8')

elif sys.version_info >= (3, 0, 0):
    # Python 3.0-3.4 are explicitly unsupported: report and bail out.
    _vers = '.'.join(str(v) for v in sys.version_info[:3])
    print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' %
          (sys.argv[0], _vers))
    sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
else:
    PYTHON3 = False

    # In python 2.x, path operations are generally done using
    # bytestrings by default, so we don't have to do any extra
    # fiddling there. We define the wrapper functions anyway just to
    # help keep code consistent between platforms.
    def _bytespath(p):
        return p

    _strpath = _bytespath
111
111
# For Windows support
# os.WIFEXITED is POSIX-only; substitute a callable that always answers
# False on platforms that lack it.
wifexited = getattr(os, "WIFEXITED", lambda x: False)
113 wifexited = getattr(os, "WIFEXITED", lambda x: False)
114
114
# Whether to use IPv6
def checksocketfamily(name, port=20058):
    """return true if we can listen on localhost using family=name

    name should be either 'AF_INET', or 'AF_INET6'.
    port being used is okay - EADDRINUSE is considered as successful.
    """
    family = getattr(socket, name, None)
    if family is None:
        # The platform does not even define this address family.
        return False
    try:
        sock = socket.socket(family, socket.SOCK_STREAM)
        sock.bind(('localhost', port))
        sock.close()
        return True
    except socket.error as exc:
        if exc.errno == errno.EADDRINUSE:
            # Someone else holds the port, but the family clearly works.
            return True
        if exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
            return False
        raise
138 return False
139
139
# useipv6 will be set by parseargs
useipv6 = None

def checkportisavailable(port):
    """return true if a port seems free to bind on localhost"""
    family = socket.AF_INET6 if useipv6 else socket.AF_INET
    try:
        sock = socket.socket(family, socket.SOCK_STREAM)
        sock.bind(('localhost', port))
        sock.close()
        return True
    except socket.error as exc:
        # These errnos mean "not available" rather than "broken setup".
        benign = (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
                  errno.EPROTONOSUPPORT)
        if exc.errno not in benign:
            raise
        return False
159
159
# Only request close_fds on POSIX — NOTE(review): presumably because
# close_fds could not be combined with redirected std handles on Windows
# in older Pythons; confirm before changing.
closefds = os.name == 'posix'
def Popen4(cmd, wd, timeout, env=None):
    """Spawn `cmd` through the shell in directory `wd`.

    Returns the subprocess.Popen object, augmented with popen2-style
    aliases (fromchild/tochild/childerr) and a `timeout` attribute. When
    `timeout` is truthy, a watchdog thread polls the process; once its
    polling loop ends it sets p.timeout = True and terminates the process
    if it is still running.
    """
    # Serialize process creation across runner threads.
    processlock.acquire()
    p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    processlock.release()

    # popen2-compatibility aliases; stderr is merged into stdout above,
    # so p.childerr is None.
    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:
        def t():
            # Poll every 100ms until the deadline or until a returncode
            # has been observed on the Popen object.
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(.1)
            p.timeout = True
            if p.returncode is None:
                terminate(p)
        threading.Thread(target=t).start()

    return p
185
# Path of the running interpreter, as bytes, with backslashes normalized
# to forward slashes.
PYTHON = _bytespath(sys.executable.replace('\\', '/'))
# Environment variable pointing the spawned interpreter at its modules;
# Jython uses its own variable name.
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
    IMPL_PATH = b'JYTHONPATH'
189 IMPL_PATH = b'JYTHONPATH'
190
190
# Default option values: option name -> (environment override, fallback).
# getparser() folds the $HGTEST_* overrides in via os.environ, coercing
# each to the type of its fallback.
defaults = {
    'jobs': ('HGTEST_JOBS', 1),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}
197 }
198
198
def canonpath(path):
    """Return `path` with '~' expanded and symlinks/relative parts resolved."""
    expanded = os.path.expanduser(path)
    return os.path.realpath(expanded)
200 return os.path.realpath(os.path.expanduser(path))
201
201
def parselistfiles(files, listtype, warn=True):
    """Parse test-list files (blacklist/whitelist) into a dict.

    Returns {stripped line (bytes): filename} for every non-empty,
    non-comment line. Files that do not exist are skipped, with a warning
    unless `warn` is False; any other IOError propagates.
    """
    entries = dict()
    for filename in files:
        try:
            path = os.path.expanduser(os.path.expandvars(filename))
            f = open(path, "rb")
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            if warn:
                print("warning: no such %s file: %s" % (listtype, filename))
            continue

        # Use a context manager so the descriptor is released even if a
        # read fails (the original closed it on the success path only).
        with f:
            for line in f:
                # '#' starts a comment; blank results are ignored.
                line = line.split(b'#', 1)[0].strip()
                if line:
                    entries[line] = filename
    return entries
221 return entries
222
222
def getparser():
    """Obtain the OptionParser used by the CLI."""
    parser = optparse.OptionParser("%prog [options] [tests]")

    # keep these sorted
    parser.add_option("--blacklist", action="append",
        help="skip tests listed in the specified blacklist file")
    parser.add_option("--whitelist", action="append",
        help="always run tests listed in the specified whitelist file")
    parser.add_option("--changed", type="string",
        help="run tests that are changed in parent rev or working directory")
    parser.add_option("-C", "--annotate", action="store_true",
        help="output files annotated with coverage")
    parser.add_option("-c", "--cover", action="store_true",
        help="print a test coverage report")
    parser.add_option("-d", "--debug", action="store_true",
        help="debug mode: write output of test scripts to console"
             " rather than capturing and diffing it (disables timeout)")
    parser.add_option("-f", "--first", action="store_true",
        help="exit on the first test failure")
    parser.add_option("-H", "--htmlcov", action="store_true",
        help="create an HTML report of the coverage of the files")
    parser.add_option("-i", "--interactive", action="store_true",
        help="prompt to accept changed output")
    parser.add_option("-j", "--jobs", type="int",
        help="number of jobs to run in parallel"
             " (default: $%s or %d)" % defaults['jobs'])
    parser.add_option("--keep-tmpdir", action="store_true",
        help="keep temporary directory after running tests")
    parser.add_option("-k", "--keywords",
        help="run tests matching keywords")
    parser.add_option("-l", "--local", action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg, "
             "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
    parser.add_option("--loop", action="store_true",
        help="loop tests repeatedly")
    parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
        help="run each test N times (default=1)", default=1)
    parser.add_option("-n", "--nodiff", action="store_true",
        help="skip showing test changes")
    parser.add_option("-p", "--port", type="int",
        help="port on which servers should listen"
             " (default: $%s or %d)" % defaults['port'])
    parser.add_option("--compiler", type="string",
        help="compiler to build with")
    parser.add_option("--pure", action="store_true",
        help="use pure Python code instead of C extensions")
    parser.add_option("-R", "--restart", action="store_true",
        help="restart at last error")
    parser.add_option("-r", "--retest", action="store_true",
        help="retest failed tests")
    parser.add_option("-S", "--noskips", action="store_true",
        help="don't report skip tests verbosely")
    parser.add_option("--shell", type="string",
        help="shell to use (default: $%s or %s)" % defaults['shell'])
    parser.add_option("-t", "--timeout", type="int",
        help="kill errant tests after TIMEOUT seconds"
             " (default: $%s or %d)" % defaults['timeout'])
    parser.add_option("--slowtimeout", type="int",
        help="kill errant slow tests after SLOWTIMEOUT seconds"
             " (default: $%s or %d)" % defaults['slowtimeout'])
    parser.add_option("--time", action="store_true",
        help="time how long each test takes")
    parser.add_option("--json", action="store_true",
        help="store test result data in 'report.json' file")
    parser.add_option("--tmpdir", type="string",
        help="run tests in the given temporary directory"
             " (implies --keep-tmpdir)")
    parser.add_option("-v", "--verbose", action="store_true",
        help="output verbose messages")
    parser.add_option("--xunit", type="string",
        help="record xunit results at specified path")
    parser.add_option("--view", type="string",
        help="external diff viewer")
    parser.add_option("--with-hg", type="string",
        metavar="HG",
        help="test using specified hg script rather than a "
             "temporary installation")
    parser.add_option("--chg", action="store_true",
        help="install and use chg wrapper in place of hg")
    parser.add_option("--with-chg", metavar="CHG",
        help="use specified chg wrapper in place of hg")
    parser.add_option("--ipv6", action="store_true",
        help="prefer IPv6 to IPv4 for network related tests")
    parser.add_option("-3", "--py3k-warnings", action="store_true",
        help="enable Py3k warnings on Python 2.6+")
    # This option should be deleted once test-check-py3-compat.t and other
    # Python 3 tests run with Python 3.
    parser.add_option("--with-python3", metavar="PYTHON3",
        help="Python 3 interpreter (if running under Python 2)"
             " (TEMPORARY)")
    parser.add_option('--extra-config-opt', action="append",
        help='set the given config opt in the test hgrc')
    parser.add_option('--random', action="store_true",
        help='run tests in random order')
    parser.add_option('--profile-runner', action='store_true',
        help='run statprof on run-tests')
    parser.add_option('--allow-slow-tests', action='store_true',
        help='allow extremely slow tests')
    parser.add_option('--showchannels', action='store_true',
        help='show scheduling channels')
    parser.add_option('--known-good-rev', type="string",
        metavar="known_good_rev",
        help=("Automatically bisect any failures using this "
              "revision as a known-good revision."))

    # Fold the $HGTEST_* environment overrides into the defaults table,
    # coercing each override to the type of its built-in fallback.
    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)

    return parser
334
334
def parseargs(args, parser):
    """Parse arguments with our OptionParser and validate results.

    Returns (options, args). Invalid combinations abort via parser.error().
    Side effects: sets the module globals `useipv6` and (possibly) `verbose`.
    """
    (options, args) = parser.parse_args(args)

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    # Resolve and sanity-check an explicitly supplied hg script.
    if options.with_hg:
        options.with_hg = canonpath(_bytespath(options.with_hg))
        if not (os.path.isfile(options.with_hg) and
                os.access(options.with_hg, os.X_OK)):
            parser.error('--with-hg must specify an executable hg script')
        if not os.path.basename(options.with_hg) == b'hg':
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
    # --local points --with-hg (and optionally --with-chg) at the binaries
    # built in the enclosing repository.
    if options.local:
        testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error('--local specified, but %r not found or '
                             'not executable' % binpath)
            setattr(options, attr, binpath)

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False # no installation to temporary location
        options.with_chg = canonpath(_bytespath(options.with_chg))
        if not (os.path.isfile(options.with_chg) and
                os.access(options.with_chg, os.X_OK)):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error('--chg does not work when --with-hg is specified '
                     '(use --with-chg instead)')

    # Decide the address family test servers will use.
    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = ((not checksocketfamily('AF_INET'))
                   and checksocketfamily('AF_INET6'))

    # Any of the three coverage flags turns coverage handling on.
    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage
            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error("sorry, coverage options do not work when --local "
                     "is specified")

    if options.anycoverage and options.with_hg:
        parser.error("sorry, coverage options do not work when --with-hg "
                     "is specified")

    global verbose
    if options.verbose:
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        # Debug mode disables the timeouts; warn when the user customized
        # them, since the custom values will be ignored.
        if options.timeout != defaults['timeout']:
            sys.stderr.write(
                'warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n')
        options.timeout = 0
        options.slowtimeout = 0
    if options.py3k_warnings:
        if PYTHON3:
            parser.error(
                '--py3k-warnings can only be used on Python 2.6 and 2.7')
    if options.with_python3:
        if PYTHON3:
            parser.error('--with-python3 cannot be used when executing with '
                         'Python 3')

        options.with_python3 = canonpath(options.with_python3)
        # Verify Python3 executable is acceptable.
        proc = subprocess.Popen([options.with_python3, b'--version'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _err = proc.communicate()
        ret = proc.wait()
        if ret != 0:
            parser.error('could not determine version of python 3')
        if not out.startswith('Python '):
            parser.error('unexpected output from python3 --version: %s' %
                         out)
        vers = version.LooseVersion(out[len('Python '):])
        if vers < version.LooseVersion('3.5.0'):
            parser.error('--with-python3 version must be 3.5.0 or greater; '
                         'got %s' % out)

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return (options, args)
459 return (options, args)
460
460
def rename(src, dst):
    """Move *src* to *dst*, allowing *dst* to already exist.

    Unlike os.rename() this is neither atomic nor friendly to open
    files, but it works when the destination already exists.
    """
    shutil.copy(src, dst)
    os.unlink(src)
467
467
# Diff helper used by getdiff(). On Python 3, difflib.unified_diff only
# accepts str; difflib.diff_bytes wraps it so we can keep comparing test
# output as bytes without decoding.
_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools
    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
472
472
def getdiff(expected, output, ref, err):
    """Produce a normalized unified diff of expected vs. actual output.

    Returns a (servefail, lines) pair: lines is the diff as a list of
    bytes, and servefail is True when the diff shows that a child
    (server) process failed to start.
    """
    servefail = False
    lines = []
    for line in _unified_diff(expected, output, ref, err):
        if line.startswith((b'+++', b'---')):
            # Normalize path separators in the file header lines and
            # drop a trailing space before the newline.
            line = line.replace(b'\\', b'/')
            if line.endswith(b' \n'):
                line = line[:-2] + b'\n'
        lines.append(line)
        if not servefail and line.startswith(
            b'+ abort: child process failed to start'):
            servefail = True

    return servefail, lines
487
487
# Global verbosity flag; when False, vlog() output is suppressed.
verbose = False

def vlog(*msg):
    """Forward *msg* to log(), but only when verbose mode is enabled."""
    if verbose is not False:
        return log(*msg)
495
495
# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceding line output in group 1:
#
#   output..output (feature !)\n
# Use a raw bytes literal: the previous non-raw form relied on the
# unrecognized escapes \( and \) surviving verbatim, which emits
# DeprecationWarning on modern Pythons and is slated to become an error.
optline = re.compile(br'(.+) \((.+?) !\)\n$')
505
505
def cdatasafe(data):
    """Return *data* made safe for embedding in an XML CDATA block.

    Control characters that are illegal inside CDATA are replaced with
    '?', and any ']]>' terminator is broken apart with a space so it
    cannot end the block prematurely.
    """
    cleaned = CDATA_EVIL.sub(b'?', data)
    return cleaned.replace(b']]>', b'] ]>')
515
515
def log(*msg):
    """Write *msg* to stdout.

    Arguments are printed space-separated, prefixed by the value of the
    global ``verbose`` when it is truthy. Output is serialized through
    ``iolock`` and flushed immediately.
    """
    with iolock:
        items = list(msg)
        if verbose:
            items.insert(0, verbose)
        for item in items:
            print(item, end=' ')
        print()
        sys.stdout.flush()
528
528
def terminate(proc):
    """Kill *proc*, tolerating process objects without .terminate()
    (pre-2.6 Python fallback via SIGKILL's gentler cousin SIGTERM)."""
    vlog('# Terminating process %d' % proc.pid)
    kill = getattr(proc, 'terminate',
                   lambda: os.kill(proc.pid, signal.SIGTERM))
    try:
        kill()
    except OSError:
        # The process may already be gone; that is fine.
        pass
536
536
def killdaemons(pidfile):
    """Kill the daemon processes listed in *pidfile* and remove the file.

    Thin wrapper around the killdaemons helper module, wiring in this
    runner's vlog() for logging. Does not retry hard (tryhard=False).
    """
    import killdaemons as killmod
    return killmod.killdaemons(pidfile, tryhard=False, remove=True,
                               logfn=vlog)
541
541
542 class Test(unittest.TestCase):
542 class Test(unittest.TestCase):
543 """Encapsulates a single, runnable test.
543 """Encapsulates a single, runnable test.
544
544
545 While this class conforms to the unittest.TestCase API, it differs in that
545 While this class conforms to the unittest.TestCase API, it differs in that
546 instances need to be instantiated manually. (Typically, unittest.TestCase
546 instances need to be instantiated manually. (Typically, unittest.TestCase
547 classes are instantiated automatically by scanning modules.)
547 classes are instantiated automatically by scanning modules.)
548 """
548 """
549
549
550 # Status code reserved for skipped tests (used by hghave).
550 # Status code reserved for skipped tests (used by hghave).
551 SKIPPED_STATUS = 80
551 SKIPPED_STATUS = 80
552
552
    def __init__(self, path, tmpdir, keeptmpdir=False,
                 debug=False,
                 timeout=defaults['timeout'],
                 startport=defaults['port'], extraconfigopts=None,
                 py3kwarnings=False, shell=None, hgcommand=None,
                 slowtimeout=defaults['slowtimeout'], usechg=False,
                 useipv6=False):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 3 port numbers for execution. It is the caller's
        responsibility to allocate a non-overlapping port range to Test
        instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        py3kwarnings enables Py3k warnings.

        shell is the shell to execute tests in.

        hgcommand is the command used to invoke Mercurial; it defaults
        to b'hg'.

        usechg, when True, makes the test run with a chg socket directory
        (see CHGSOCKNAME in _getenv()).

        useipv6 makes test servers use ::1 instead of 127.0.0.1.
        """
        # Identity: name/paths derived from the test file location.
        self.path = path
        self.bname = os.path.basename(path)
        self.name = _strpath(self.bname)
        self._testdir = os.path.dirname(path)
        # .err file written next to the test on output mismatch.
        self.errpath = os.path.join(self._testdir, b'%s.err' % self.bname)

        # Configuration captured from the constructor arguments.
        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._py3kwarnings = py3kwarnings
        self._shell = _bytespath(shell)
        self._hgcommand = hgcommand or b'hg'
        self._usechg = usechg
        self._useipv6 = useipv6

        # Per-run state; (re)initialized in setUp().
        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None
        self._chgsockdir = None

        # If we're not in --debug mode and reference output file exists,
        # check test output against it.
        if debug:
            self._refout = None # to match "out is None"
        elif os.path.exists(self.refpath):
            f = open(self.refpath, 'rb')
            self._refout = f.read().splitlines(True)
            f.close()
        else:
            self._refout = []
628
628
    # needed to get base class __repr__ running
    @property
    def _testMethodName(self):
        # unittest.TestCase looks this attribute up when building its
        # repr; expose the test name since tests here are constructed
        # manually rather than discovered.
        return self.name
633
633
634 def __str__(self):
634 def __str__(self):
635 return self.name
635 return self.name
636
636
637 def shortDescription(self):
637 def shortDescription(self):
638 return self.name
638 return self.name
639
639
640 def setUp(self):
640 def setUp(self):
641 """Tasks to perform before run()."""
641 """Tasks to perform before run()."""
642 self._finished = False
642 self._finished = False
643 self._ret = None
643 self._ret = None
644 self._out = None
644 self._out = None
645 self._skipped = None
645 self._skipped = None
646
646
647 try:
647 try:
648 os.mkdir(self._threadtmp)
648 os.mkdir(self._threadtmp)
649 except OSError as e:
649 except OSError as e:
650 if e.errno != errno.EEXIST:
650 if e.errno != errno.EEXIST:
651 raise
651 raise
652
652
653 name = os.path.basename(self.path)
653 name = os.path.basename(self.path)
654 self._testtmp = os.path.join(self._threadtmp, name)
654 self._testtmp = os.path.join(self._threadtmp, name)
655 os.mkdir(self._testtmp)
655 os.mkdir(self._testtmp)
656
656
657 # Remove any previous output files.
657 # Remove any previous output files.
658 if os.path.exists(self.errpath):
658 if os.path.exists(self.errpath):
659 try:
659 try:
660 os.remove(self.errpath)
660 os.remove(self.errpath)
661 except OSError as e:
661 except OSError as e:
662 # We might have raced another test to clean up a .err
662 # We might have raced another test to clean up a .err
663 # file, so ignore ENOENT when removing a previous .err
663 # file, so ignore ENOENT when removing a previous .err
664 # file.
664 # file.
665 if e.errno != errno.ENOENT:
665 if e.errno != errno.ENOENT:
666 raise
666 raise
667
667
668 if self._usechg:
668 if self._usechg:
669 self._chgsockdir = os.path.join(self._threadtmp,
669 self._chgsockdir = os.path.join(self._threadtmp,
670 b'%s.chgsock' % name)
670 b'%s.chgsock' % name)
671 os.mkdir(self._chgsockdir)
671 os.mkdir(self._chgsockdir)
672
672
    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        # The outer try guarantees result.stopTest() runs no matter how
        # setUp/runTest/tearDown exit.
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except IgnoreTest as e:
                result.addIgnore(self, str(e))
                # As with skips, ignores also should be excluded from
                # the number of tests executed.
                result.testsRun -= 1
            except WarnTest as e:
                result.addWarn(self, str(e))
            except ReportedTest as e:
                # NOTE(review): presumably the raiser already recorded the
                # outcome on the result object, so nothing to add here.
                pass
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            # tearDown always runs, even after a failure; its own errors
            # turn a success into a failure.
            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)
736
736
    def runTest(self):
        """Run this test instance.

        This will return a tuple describing the result of the test.
        """
        env = self._getenv()
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            # Human-readable explanation of a process exit status.
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        # Exit code 80 (SKIPPED_STATUS) means hghave decided the test
        # cannot run here; parse its output to find out why.
        if ret == self.SKIPPED_STATUS:
            if out is None: # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            raise WarnTest('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (ret != 0 or out != self._refout) and not self._skipped \
                and not self._debug:
                f = open(self.errpath, 'wb')
                for line in out:
                    f.write(line)
                f.close()

            # The result object handles diff calculation for us.
            if self._result.addOutputMismatch(self, ret, out, self._refout):
                # change was accepted, skip failing
                return

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))
801
801
    def tearDown(self):
        """Tasks to perform after run()."""
        # Kill any daemons the test left running before touching the
        # temporary directories.
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
                (self._testtmp.decode('utf-8'),
                 self._threadtmp.decode('utf-8')))
        else:
            shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        # Persist the observed output to the .err file so a failing run
        # can be inspected (and diffed) after the fact.
        if (self._ret != 0 or self._out != self._refout) and not self._skipped \
            and not self._debug and self._out:
            f = open(self.errpath, 'wb')
            for line in self._out:
                f.write(line)
            f.close()

        vlog("# Ret was:", self._ret, '(%s)' % self.name)
829
829
830 def _run(self, env):
830 def _run(self, env):
831 # This should be implemented in child classes to run tests.
831 # This should be implemented in child classes to run tests.
832 raise SkipTest('unknown test type')
832 raise SkipTest('unknown test type')
833
833
834 def abort(self):
834 def abort(self):
835 """Terminate execution of this test."""
835 """Terminate execution of this test."""
836 self._aborted = True
836 self._aborted = True
837
837
838 def _portmap(self, i):
838 def _portmap(self, i):
839 offset = b'' if i == 0 else b'%d' % i
839 offset = b'' if i == 0 else b'%d' % i
840 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
840 return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
841
841
842 def _getreplacements(self):
842 def _getreplacements(self):
843 """Obtain a mapping of text replacements to apply to test output.
843 """Obtain a mapping of text replacements to apply to test output.
844
844
845 Test output needs to be normalized so it can be compared to expected
845 Test output needs to be normalized so it can be compared to expected
846 output. This function defines how some of that normalization will
846 output. This function defines how some of that normalization will
847 occur.
847 occur.
848 """
848 """
849 r = [
849 r = [
850 # This list should be parallel to defineport in _getenv
850 # This list should be parallel to defineport in _getenv
851 self._portmap(0),
851 self._portmap(0),
852 self._portmap(1),
852 self._portmap(1),
853 self._portmap(2),
853 self._portmap(2),
854 (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
854 (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
855 br'\1 (glob)'),
855 br'\1 (glob)'),
856 (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
856 (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
857 (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
857 (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
858 ]
858 ]
859 r.append((self._escapepath(self._testtmp), b'$TESTTMP'))
859 r.append((self._escapepath(self._testtmp), b'$TESTTMP'))
860
860
861 return r
861 return r
862
862
863 def _escapepath(self, p):
863 def _escapepath(self, p):
864 if os.name == 'nt':
864 if os.name == 'nt':
865 return (
865 return (
866 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
866 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
867 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
867 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
868 for c in p))
868 for c in p))
869 )
869 )
870 else:
870 else:
871 return re.escape(p)
871 return re.escape(p)
872
872
873 def _localip(self):
873 def _localip(self):
874 if self._useipv6:
874 if self._useipv6:
875 return b'::1'
875 return b'::1'
876 else:
876 else:
877 return b'127.0.0.1'
877 return b'127.0.0.1'
878
878
879 def _getenv(self):
879 def _getenv(self):
880 """Obtain environment variables to use during test execution."""
880 """Obtain environment variables to use during test execution."""
881 def defineport(i):
881 def defineport(i):
882 offset = '' if i == 0 else '%s' % i
882 offset = '' if i == 0 else '%s' % i
883 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
883 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
884 env = os.environ.copy()
884 env = os.environ.copy()
885 if sysconfig is not None:
885 if sysconfig is not None:
886 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
886 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
887 env['HGEMITWARNINGS'] = '1'
887 env['TESTTMP'] = self._testtmp
888 env['TESTTMP'] = self._testtmp
888 env['HOME'] = self._testtmp
889 env['HOME'] = self._testtmp
889 # This number should match portneeded in _getport
890 # This number should match portneeded in _getport
890 for port in xrange(3):
891 for port in xrange(3):
891 # This list should be parallel to _portmap in _getreplacements
892 # This list should be parallel to _portmap in _getreplacements
892 defineport(port)
893 defineport(port)
893 env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc')
894 env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc')
894 env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids')
895 env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids')
895 env["HGEDITOR"] = ('"' + sys.executable + '"'
896 env["HGEDITOR"] = ('"' + sys.executable + '"'
896 + ' -c "import sys; sys.exit(0)"')
897 + ' -c "import sys; sys.exit(0)"')
897 env["HGMERGE"] = "internal:merge"
898 env["HGMERGE"] = "internal:merge"
898 env["HGUSER"] = "test"
899 env["HGUSER"] = "test"
899 env["HGENCODING"] = "ascii"
900 env["HGENCODING"] = "ascii"
900 env["HGENCODINGMODE"] = "strict"
901 env["HGENCODINGMODE"] = "strict"
901 env['HGIPV6'] = str(int(self._useipv6))
902 env['HGIPV6'] = str(int(self._useipv6))
902
903
903 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
904 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
904 # IP addresses.
905 # IP addresses.
905 env['LOCALIP'] = self._localip()
906 env['LOCALIP'] = self._localip()
906
907
907 # Reset some environment variables to well-known values so that
908 # Reset some environment variables to well-known values so that
908 # the tests produce repeatable output.
909 # the tests produce repeatable output.
909 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
910 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
910 env['TZ'] = 'GMT'
911 env['TZ'] = 'GMT'
911 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
912 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
912 env['COLUMNS'] = '80'
913 env['COLUMNS'] = '80'
913 env['TERM'] = 'xterm'
914 env['TERM'] = 'xterm'
914
915
915 for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
916 for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
916 'HGPLAIN HGPLAINEXCEPT EDITOR VISUAL PAGER ' +
917 'HGPLAIN HGPLAINEXCEPT EDITOR VISUAL PAGER ' +
917 'NO_PROXY CHGDEBUG').split():
918 'NO_PROXY CHGDEBUG').split():
918 if k in env:
919 if k in env:
919 del env[k]
920 del env[k]
920
921
921 # unset env related to hooks
922 # unset env related to hooks
922 for k in env.keys():
923 for k in env.keys():
923 if k.startswith('HG_'):
924 if k.startswith('HG_'):
924 del env[k]
925 del env[k]
925
926
926 if self._usechg:
927 if self._usechg:
927 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
928 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
928
929
929 return env
930 return env
930
931
931 def _createhgrc(self, path):
932 def _createhgrc(self, path):
932 """Create an hgrc file for this test."""
933 """Create an hgrc file for this test."""
933 hgrc = open(path, 'wb')
934 hgrc = open(path, 'wb')
934 hgrc.write(b'[ui]\n')
935 hgrc.write(b'[ui]\n')
935 hgrc.write(b'slash = True\n')
936 hgrc.write(b'slash = True\n')
936 hgrc.write(b'interactive = False\n')
937 hgrc.write(b'interactive = False\n')
937 hgrc.write(b'mergemarkers = detailed\n')
938 hgrc.write(b'mergemarkers = detailed\n')
938 hgrc.write(b'promptecho = True\n')
939 hgrc.write(b'promptecho = True\n')
939 hgrc.write(b'[defaults]\n')
940 hgrc.write(b'[defaults]\n')
940 hgrc.write(b'backout = -d "0 0"\n')
941 hgrc.write(b'backout = -d "0 0"\n')
941 hgrc.write(b'commit = -d "0 0"\n')
942 hgrc.write(b'commit = -d "0 0"\n')
942 hgrc.write(b'shelve = --date "0 0"\n')
943 hgrc.write(b'shelve = --date "0 0"\n')
943 hgrc.write(b'tag = -d "0 0"\n')
944 hgrc.write(b'tag = -d "0 0"\n')
944 hgrc.write(b'[devel]\n')
945 hgrc.write(b'[devel]\n')
945 hgrc.write(b'all-warnings = true\n')
946 hgrc.write(b'all-warnings = true\n')
946 hgrc.write(b'[largefiles]\n')
947 hgrc.write(b'[largefiles]\n')
947 hgrc.write(b'usercache = %s\n' %
948 hgrc.write(b'usercache = %s\n' %
948 (os.path.join(self._testtmp, b'.cache/largefiles')))
949 (os.path.join(self._testtmp, b'.cache/largefiles')))
949 hgrc.write(b'[web]\n')
950 hgrc.write(b'[web]\n')
950 hgrc.write(b'address = localhost\n')
951 hgrc.write(b'address = localhost\n')
951 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
952 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
952
953
953 for opt in self._extraconfigopts:
954 for opt in self._extraconfigopts:
954 section, key = opt.split('.', 1)
955 section, key = opt.split('.', 1)
955 assert '=' in key, ('extra config opt %s must '
956 assert '=' in key, ('extra config opt %s must '
956 'have an = for assignment' % opt)
957 'have an = for assignment' % opt)
957 hgrc.write(b'[%s]\n%s\n' % (section, key))
958 hgrc.write(b'[%s]\n%s\n' % (section, key))
958 hgrc.close()
959 hgrc.close()
959
960
960 def fail(self, msg):
961 def fail(self, msg):
961 # unittest differentiates between errored and failed.
962 # unittest differentiates between errored and failed.
962 # Failed is denoted by AssertionError (by default at least).
963 # Failed is denoted by AssertionError (by default at least).
963 raise AssertionError(msg)
964 raise AssertionError(msg)
964
965
965 def _runcommand(self, cmd, env, normalizenewlines=False):
966 def _runcommand(self, cmd, env, normalizenewlines=False):
966 """Run command in a sub-process, capturing the output (stdout and
967 """Run command in a sub-process, capturing the output (stdout and
967 stderr).
968 stderr).
968
969
969 Return a tuple (exitcode, output). output is None in debug mode.
970 Return a tuple (exitcode, output). output is None in debug mode.
970 """
971 """
971 if self._debug:
972 if self._debug:
972 proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp,
973 proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp,
973 env=env)
974 env=env)
974 ret = proc.wait()
975 ret = proc.wait()
975 return (ret, None)
976 return (ret, None)
976
977
977 proc = Popen4(cmd, self._testtmp, self._timeout, env)
978 proc = Popen4(cmd, self._testtmp, self._timeout, env)
978 def cleanup():
979 def cleanup():
979 terminate(proc)
980 terminate(proc)
980 ret = proc.wait()
981 ret = proc.wait()
981 if ret == 0:
982 if ret == 0:
982 ret = signal.SIGTERM << 8
983 ret = signal.SIGTERM << 8
983 killdaemons(env['DAEMON_PIDS'])
984 killdaemons(env['DAEMON_PIDS'])
984 return ret
985 return ret
985
986
986 output = ''
987 output = ''
987 proc.tochild.close()
988 proc.tochild.close()
988
989
989 try:
990 try:
990 output = proc.fromchild.read()
991 output = proc.fromchild.read()
991 except KeyboardInterrupt:
992 except KeyboardInterrupt:
992 vlog('# Handling keyboard interrupt')
993 vlog('# Handling keyboard interrupt')
993 cleanup()
994 cleanup()
994 raise
995 raise
995
996
996 ret = proc.wait()
997 ret = proc.wait()
997 if wifexited(ret):
998 if wifexited(ret):
998 ret = os.WEXITSTATUS(ret)
999 ret = os.WEXITSTATUS(ret)
999
1000
1000 if proc.timeout:
1001 if proc.timeout:
1001 ret = 'timeout'
1002 ret = 'timeout'
1002
1003
1003 if ret:
1004 if ret:
1004 killdaemons(env['DAEMON_PIDS'])
1005 killdaemons(env['DAEMON_PIDS'])
1005
1006
1006 for s, r in self._getreplacements():
1007 for s, r in self._getreplacements():
1007 output = re.sub(s, r, output)
1008 output = re.sub(s, r, output)
1008
1009
1009 if normalizenewlines:
1010 if normalizenewlines:
1010 output = output.replace('\r\n', '\n')
1011 output = output.replace('\r\n', '\n')
1011
1012
1012 return ret, output.splitlines(True)
1013 return ret, output.splitlines(True)
1013
1014
1014 class PythonTest(Test):
1015 class PythonTest(Test):
1015 """A Python-based test."""
1016 """A Python-based test."""
1016
1017
1017 @property
1018 @property
1018 def refpath(self):
1019 def refpath(self):
1019 return os.path.join(self._testdir, b'%s.out' % self.bname)
1020 return os.path.join(self._testdir, b'%s.out' % self.bname)
1020
1021
1021 def _run(self, env):
1022 def _run(self, env):
1022 py3kswitch = self._py3kwarnings and b' -3' or b''
1023 py3kswitch = self._py3kwarnings and b' -3' or b''
1023 cmd = b'%s%s "%s"' % (PYTHON, py3kswitch, self.path)
1024 cmd = b'%s%s "%s"' % (PYTHON, py3kswitch, self.path)
1024 vlog("# Running", cmd)
1025 vlog("# Running", cmd)
1025 normalizenewlines = os.name == 'nt'
1026 normalizenewlines = os.name == 'nt'
1026 result = self._runcommand(cmd, env,
1027 result = self._runcommand(cmd, env,
1027 normalizenewlines=normalizenewlines)
1028 normalizenewlines=normalizenewlines)
1028 if self._aborted:
1029 if self._aborted:
1029 raise KeyboardInterrupt()
1030 raise KeyboardInterrupt()
1030
1031
1031 return result
1032 return result
1032
1033
1033 # Some glob patterns apply only in some circumstances, so the script
1034 # Some glob patterns apply only in some circumstances, so the script
1034 # might want to remove (glob) annotations that otherwise should be
1035 # might want to remove (glob) annotations that otherwise should be
1035 # retained.
1036 # retained.
1036 checkcodeglobpats = [
1037 checkcodeglobpats = [
1037 # On Windows it looks like \ doesn't require a (glob), but we know
1038 # On Windows it looks like \ doesn't require a (glob), but we know
1038 # better.
1039 # better.
1039 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1040 re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
1040 re.compile(br'^moving \S+/.*[^)]$'),
1041 re.compile(br'^moving \S+/.*[^)]$'),
1041 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1042 re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
1042 # Not all platforms have 127.0.0.1 as loopback (though most do),
1043 # Not all platforms have 127.0.0.1 as loopback (though most do),
1043 # so we always glob that too.
1044 # so we always glob that too.
1044 re.compile(br'.*\$LOCALIP.*$'),
1045 re.compile(br'.*\$LOCALIP.*$'),
1045 ]
1046 ]
1046
1047
1047 bchr = chr
1048 bchr = chr
1048 if PYTHON3:
1049 if PYTHON3:
1049 bchr = lambda x: bytes([x])
1050 bchr = lambda x: bytes([x])
1050
1051
1051 class TTest(Test):
1052 class TTest(Test):
1052 """A "t test" is a test backed by a .t file."""
1053 """A "t test" is a test backed by a .t file."""
1053
1054
1054 SKIPPED_PREFIX = b'skipped: '
1055 SKIPPED_PREFIX = b'skipped: '
1055 FAILED_PREFIX = b'hghave check failed: '
1056 FAILED_PREFIX = b'hghave check failed: '
1056 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1057 NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search
1057
1058
1058 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1059 ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
1059 ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
1060 ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
1060 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1061 ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})
1061
1062
1062 @property
1063 @property
1063 def refpath(self):
1064 def refpath(self):
1064 return os.path.join(self._testdir, self.bname)
1065 return os.path.join(self._testdir, self.bname)
1065
1066
1066 def _run(self, env):
1067 def _run(self, env):
1067 f = open(self.path, 'rb')
1068 f = open(self.path, 'rb')
1068 lines = f.readlines()
1069 lines = f.readlines()
1069 f.close()
1070 f.close()
1070
1071
1071 salt, script, after, expected = self._parsetest(lines)
1072 salt, script, after, expected = self._parsetest(lines)
1072
1073
1073 # Write out the generated script.
1074 # Write out the generated script.
1074 fname = b'%s.sh' % self._testtmp
1075 fname = b'%s.sh' % self._testtmp
1075 f = open(fname, 'wb')
1076 f = open(fname, 'wb')
1076 for l in script:
1077 for l in script:
1077 f.write(l)
1078 f.write(l)
1078 f.close()
1079 f.close()
1079
1080
1080 cmd = b'%s "%s"' % (self._shell, fname)
1081 cmd = b'%s "%s"' % (self._shell, fname)
1081 vlog("# Running", cmd)
1082 vlog("# Running", cmd)
1082
1083
1083 exitcode, output = self._runcommand(cmd, env)
1084 exitcode, output = self._runcommand(cmd, env)
1084
1085
1085 if self._aborted:
1086 if self._aborted:
1086 raise KeyboardInterrupt()
1087 raise KeyboardInterrupt()
1087
1088
1088 # Do not merge output if skipped. Return hghave message instead.
1089 # Do not merge output if skipped. Return hghave message instead.
1089 # Similarly, with --debug, output is None.
1090 # Similarly, with --debug, output is None.
1090 if exitcode == self.SKIPPED_STATUS or output is None:
1091 if exitcode == self.SKIPPED_STATUS or output is None:
1091 return exitcode, output
1092 return exitcode, output
1092
1093
1093 return self._processoutput(exitcode, output, salt, after, expected)
1094 return self._processoutput(exitcode, output, salt, after, expected)
1094
1095
1095 def _hghave(self, reqs):
1096 def _hghave(self, reqs):
1096 # TODO do something smarter when all other uses of hghave are gone.
1097 # TODO do something smarter when all other uses of hghave are gone.
1097 runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
1098 runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
1098 tdir = runtestdir.replace(b'\\', b'/')
1099 tdir = runtestdir.replace(b'\\', b'/')
1099 proc = Popen4(b'%s -c "%s/hghave %s"' %
1100 proc = Popen4(b'%s -c "%s/hghave %s"' %
1100 (self._shell, tdir, b' '.join(reqs)),
1101 (self._shell, tdir, b' '.join(reqs)),
1101 self._testtmp, 0, self._getenv())
1102 self._testtmp, 0, self._getenv())
1102 stdout, stderr = proc.communicate()
1103 stdout, stderr = proc.communicate()
1103 ret = proc.wait()
1104 ret = proc.wait()
1104 if wifexited(ret):
1105 if wifexited(ret):
1105 ret = os.WEXITSTATUS(ret)
1106 ret = os.WEXITSTATUS(ret)
1106 if ret == 2:
1107 if ret == 2:
1107 print(stdout.decode('utf-8'))
1108 print(stdout.decode('utf-8'))
1108 sys.exit(1)
1109 sys.exit(1)
1109
1110
1110 if ret != 0:
1111 if ret != 0:
1111 return False, stdout
1112 return False, stdout
1112
1113
1113 if 'slow' in reqs:
1114 if 'slow' in reqs:
1114 self._timeout = self._slowtimeout
1115 self._timeout = self._slowtimeout
1115 return True, None
1116 return True, None
1116
1117
1117 def _parsetest(self, lines):
1118 def _parsetest(self, lines):
1118 # We generate a shell script which outputs unique markers to line
1119 # We generate a shell script which outputs unique markers to line
1119 # up script results with our source. These markers include input
1120 # up script results with our source. These markers include input
1120 # line number and the last return code.
1121 # line number and the last return code.
1121 salt = b"SALT%d" % time.time()
1122 salt = b"SALT%d" % time.time()
1122 def addsalt(line, inpython):
1123 def addsalt(line, inpython):
1123 if inpython:
1124 if inpython:
1124 script.append(b'%s %d 0\n' % (salt, line))
1125 script.append(b'%s %d 0\n' % (salt, line))
1125 else:
1126 else:
1126 script.append(b'echo %s %d $?\n' % (salt, line))
1127 script.append(b'echo %s %d $?\n' % (salt, line))
1127
1128
1128 script = []
1129 script = []
1129
1130
1130 # After we run the shell script, we re-unify the script output
1131 # After we run the shell script, we re-unify the script output
1131 # with non-active parts of the source, with synchronization by our
1132 # with non-active parts of the source, with synchronization by our
1132 # SALT line number markers. The after table contains the non-active
1133 # SALT line number markers. The after table contains the non-active
1133 # components, ordered by line number.
1134 # components, ordered by line number.
1134 after = {}
1135 after = {}
1135
1136
1136 # Expected shell script output.
1137 # Expected shell script output.
1137 expected = {}
1138 expected = {}
1138
1139
1139 pos = prepos = -1
1140 pos = prepos = -1
1140
1141
1141 # True or False when in a true or false conditional section
1142 # True or False when in a true or false conditional section
1142 skipping = None
1143 skipping = None
1143
1144
1144 # We keep track of whether or not we're in a Python block so we
1145 # We keep track of whether or not we're in a Python block so we
1145 # can generate the surrounding doctest magic.
1146 # can generate the surrounding doctest magic.
1146 inpython = False
1147 inpython = False
1147
1148
1148 if self._debug:
1149 if self._debug:
1149 script.append(b'set -x\n')
1150 script.append(b'set -x\n')
1150 if self._hgcommand != b'hg':
1151 if self._hgcommand != b'hg':
1151 script.append(b'alias hg="%s"\n' % self._hgcommand)
1152 script.append(b'alias hg="%s"\n' % self._hgcommand)
1152 if os.getenv('MSYSTEM'):
1153 if os.getenv('MSYSTEM'):
1153 script.append(b'alias pwd="pwd -W"\n')
1154 script.append(b'alias pwd="pwd -W"\n')
1154
1155
1155 n = 0
1156 n = 0
1156 for n, l in enumerate(lines):
1157 for n, l in enumerate(lines):
1157 if not l.endswith(b'\n'):
1158 if not l.endswith(b'\n'):
1158 l += b'\n'
1159 l += b'\n'
1159 if l.startswith(b'#require'):
1160 if l.startswith(b'#require'):
1160 lsplit = l.split()
1161 lsplit = l.split()
1161 if len(lsplit) < 2 or lsplit[0] != b'#require':
1162 if len(lsplit) < 2 or lsplit[0] != b'#require':
1162 after.setdefault(pos, []).append(' !!! invalid #require\n')
1163 after.setdefault(pos, []).append(' !!! invalid #require\n')
1163 haveresult, message = self._hghave(lsplit[1:])
1164 haveresult, message = self._hghave(lsplit[1:])
1164 if not haveresult:
1165 if not haveresult:
1165 script = [b'echo "%s"\nexit 80\n' % message]
1166 script = [b'echo "%s"\nexit 80\n' % message]
1166 break
1167 break
1167 after.setdefault(pos, []).append(l)
1168 after.setdefault(pos, []).append(l)
1168 elif l.startswith(b'#if'):
1169 elif l.startswith(b'#if'):
1169 lsplit = l.split()
1170 lsplit = l.split()
1170 if len(lsplit) < 2 or lsplit[0] != b'#if':
1171 if len(lsplit) < 2 or lsplit[0] != b'#if':
1171 after.setdefault(pos, []).append(' !!! invalid #if\n')
1172 after.setdefault(pos, []).append(' !!! invalid #if\n')
1172 if skipping is not None:
1173 if skipping is not None:
1173 after.setdefault(pos, []).append(' !!! nested #if\n')
1174 after.setdefault(pos, []).append(' !!! nested #if\n')
1174 skipping = not self._hghave(lsplit[1:])[0]
1175 skipping = not self._hghave(lsplit[1:])[0]
1175 after.setdefault(pos, []).append(l)
1176 after.setdefault(pos, []).append(l)
1176 elif l.startswith(b'#else'):
1177 elif l.startswith(b'#else'):
1177 if skipping is None:
1178 if skipping is None:
1178 after.setdefault(pos, []).append(' !!! missing #if\n')
1179 after.setdefault(pos, []).append(' !!! missing #if\n')
1179 skipping = not skipping
1180 skipping = not skipping
1180 after.setdefault(pos, []).append(l)
1181 after.setdefault(pos, []).append(l)
1181 elif l.startswith(b'#endif'):
1182 elif l.startswith(b'#endif'):
1182 if skipping is None:
1183 if skipping is None:
1183 after.setdefault(pos, []).append(' !!! missing #if\n')
1184 after.setdefault(pos, []).append(' !!! missing #if\n')
1184 skipping = None
1185 skipping = None
1185 after.setdefault(pos, []).append(l)
1186 after.setdefault(pos, []).append(l)
1186 elif skipping:
1187 elif skipping:
1187 after.setdefault(pos, []).append(l)
1188 after.setdefault(pos, []).append(l)
1188 elif l.startswith(b' >>> '): # python inlines
1189 elif l.startswith(b' >>> '): # python inlines
1189 after.setdefault(pos, []).append(l)
1190 after.setdefault(pos, []).append(l)
1190 prepos = pos
1191 prepos = pos
1191 pos = n
1192 pos = n
1192 if not inpython:
1193 if not inpython:
1193 # We've just entered a Python block. Add the header.
1194 # We've just entered a Python block. Add the header.
1194 inpython = True
1195 inpython = True
1195 addsalt(prepos, False) # Make sure we report the exit code.
1196 addsalt(prepos, False) # Make sure we report the exit code.
1196 script.append(b'%s -m heredoctest <<EOF\n' % PYTHON)
1197 script.append(b'%s -m heredoctest <<EOF\n' % PYTHON)
1197 addsalt(n, True)
1198 addsalt(n, True)
1198 script.append(l[2:])
1199 script.append(l[2:])
1199 elif l.startswith(b' ... '): # python inlines
1200 elif l.startswith(b' ... '): # python inlines
1200 after.setdefault(prepos, []).append(l)
1201 after.setdefault(prepos, []).append(l)
1201 script.append(l[2:])
1202 script.append(l[2:])
1202 elif l.startswith(b' $ '): # commands
1203 elif l.startswith(b' $ '): # commands
1203 if inpython:
1204 if inpython:
1204 script.append(b'EOF\n')
1205 script.append(b'EOF\n')
1205 inpython = False
1206 inpython = False
1206 after.setdefault(pos, []).append(l)
1207 after.setdefault(pos, []).append(l)
1207 prepos = pos
1208 prepos = pos
1208 pos = n
1209 pos = n
1209 addsalt(n, False)
1210 addsalt(n, False)
1210 cmd = l[4:].split()
1211 cmd = l[4:].split()
1211 if len(cmd) == 2 and cmd[0] == b'cd':
1212 if len(cmd) == 2 and cmd[0] == b'cd':
1212 l = b' $ cd %s || exit 1\n' % cmd[1]
1213 l = b' $ cd %s || exit 1\n' % cmd[1]
1213 script.append(l[4:])
1214 script.append(l[4:])
1214 elif l.startswith(b' > '): # continuations
1215 elif l.startswith(b' > '): # continuations
1215 after.setdefault(prepos, []).append(l)
1216 after.setdefault(prepos, []).append(l)
1216 script.append(l[4:])
1217 script.append(l[4:])
1217 elif l.startswith(b' '): # results
1218 elif l.startswith(b' '): # results
1218 # Queue up a list of expected results.
1219 # Queue up a list of expected results.
1219 expected.setdefault(pos, []).append(l[2:])
1220 expected.setdefault(pos, []).append(l[2:])
1220 else:
1221 else:
1221 if inpython:
1222 if inpython:
1222 script.append(b'EOF\n')
1223 script.append(b'EOF\n')
1223 inpython = False
1224 inpython = False
1224 # Non-command/result. Queue up for merged output.
1225 # Non-command/result. Queue up for merged output.
1225 after.setdefault(pos, []).append(l)
1226 after.setdefault(pos, []).append(l)
1226
1227
1227 if inpython:
1228 if inpython:
1228 script.append(b'EOF\n')
1229 script.append(b'EOF\n')
1229 if skipping is not None:
1230 if skipping is not None:
1230 after.setdefault(pos, []).append(' !!! missing #endif\n')
1231 after.setdefault(pos, []).append(' !!! missing #endif\n')
1231 addsalt(n + 1, False)
1232 addsalt(n + 1, False)
1232
1233
1233 return salt, script, after, expected
1234 return salt, script, after, expected
1234
1235
1235 def _processoutput(self, exitcode, output, salt, after, expected):
1236 def _processoutput(self, exitcode, output, salt, after, expected):
1236 # Merge the script output back into a unified test.
1237 # Merge the script output back into a unified test.
1237 warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
1238 warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
1238 if exitcode != 0:
1239 if exitcode != 0:
1239 warnonly = 3
1240 warnonly = 3
1240
1241
1241 pos = -1
1242 pos = -1
1242 postout = []
1243 postout = []
1243 for l in output:
1244 for l in output:
1244 lout, lcmd = l, None
1245 lout, lcmd = l, None
1245 if salt in l:
1246 if salt in l:
1246 lout, lcmd = l.split(salt, 1)
1247 lout, lcmd = l.split(salt, 1)
1247
1248
1248 while lout:
1249 while lout:
1249 if not lout.endswith(b'\n'):
1250 if not lout.endswith(b'\n'):
1250 lout += b' (no-eol)\n'
1251 lout += b' (no-eol)\n'
1251
1252
1252 # Find the expected output at the current position.
1253 # Find the expected output at the current position.
1253 els = [None]
1254 els = [None]
1254 if expected.get(pos, None):
1255 if expected.get(pos, None):
1255 els = expected[pos]
1256 els = expected[pos]
1256
1257
1257 i = 0
1258 i = 0
1258 optional = []
1259 optional = []
1259 while i < len(els):
1260 while i < len(els):
1260 el = els[i]
1261 el = els[i]
1261
1262
1262 r = TTest.linematch(el, lout)
1263 r = TTest.linematch(el, lout)
1263 if isinstance(r, str):
1264 if isinstance(r, str):
1264 if r == '+glob':
1265 if r == '+glob':
1265 lout = el[:-1] + ' (glob)\n'
1266 lout = el[:-1] + ' (glob)\n'
1266 r = '' # Warn only this line.
1267 r = '' # Warn only this line.
1267 elif r == '-glob':
1268 elif r == '-glob':
1268 lout = ''.join(el.rsplit(' (glob)', 1))
1269 lout = ''.join(el.rsplit(' (glob)', 1))
1269 r = '' # Warn only this line.
1270 r = '' # Warn only this line.
1270 elif r == "retry":
1271 elif r == "retry":
1271 postout.append(b' ' + el)
1272 postout.append(b' ' + el)
1272 els.pop(i)
1273 els.pop(i)
1273 break
1274 break
1274 else:
1275 else:
1275 log('\ninfo, unknown linematch result: %r\n' % r)
1276 log('\ninfo, unknown linematch result: %r\n' % r)
1276 r = False
1277 r = False
1277 if r:
1278 if r:
1278 els.pop(i)
1279 els.pop(i)
1279 break
1280 break
1280 if el:
1281 if el:
1281 if el.endswith(b" (?)\n"):
1282 if el.endswith(b" (?)\n"):
1282 optional.append(i)
1283 optional.append(i)
1283 else:
1284 else:
1284 m = optline.match(el)
1285 m = optline.match(el)
1285 if m:
1286 if m:
1286 conditions = [c for c in m.group(2).split(' ')]
1287 conditions = [c for c in m.group(2).split(' ')]
1287
1288
1288 if self._hghave(conditions)[0]:
1289 if self._hghave(conditions)[0]:
1289 lout = el
1290 lout = el
1290 else:
1291 else:
1291 optional.append(i)
1292 optional.append(i)
1292
1293
1293 i += 1
1294 i += 1
1294
1295
1295 if r:
1296 if r:
1296 if r == "retry":
1297 if r == "retry":
1297 continue
1298 continue
1298 # clean up any optional leftovers
1299 # clean up any optional leftovers
1299 for i in optional:
1300 for i in optional:
1300 postout.append(b' ' + els[i])
1301 postout.append(b' ' + els[i])
1301 for i in reversed(optional):
1302 for i in reversed(optional):
1302 del els[i]
1303 del els[i]
1303 postout.append(b' ' + el)
1304 postout.append(b' ' + el)
1304 else:
1305 else:
1305 if self.NEEDESCAPE(lout):
1306 if self.NEEDESCAPE(lout):
1306 lout = TTest._stringescape(b'%s (esc)\n' %
1307 lout = TTest._stringescape(b'%s (esc)\n' %
1307 lout.rstrip(b'\n'))
1308 lout.rstrip(b'\n'))
1308 postout.append(b' ' + lout) # Let diff deal with it.
1309 postout.append(b' ' + lout) # Let diff deal with it.
1309 if r != '': # If line failed.
1310 if r != '': # If line failed.
1310 warnonly = 3 # for sure not
1311 warnonly = 3 # for sure not
1311 elif warnonly == 1: # Is "not yet" and line is warn only.
1312 elif warnonly == 1: # Is "not yet" and line is warn only.
1312 warnonly = 2 # Yes do warn.
1313 warnonly = 2 # Yes do warn.
1313 break
1314 break
1314 else:
1315 else:
1315 # clean up any optional leftovers
1316 # clean up any optional leftovers
1316 while expected.get(pos, None):
1317 while expected.get(pos, None):
1317 el = expected[pos].pop(0)
1318 el = expected[pos].pop(0)
1318 if el:
1319 if el:
1319 if (not optline.match(el)
1320 if (not optline.match(el)
1320 and not el.endswith(b" (?)\n")):
1321 and not el.endswith(b" (?)\n")):
1321 break
1322 break
1322 postout.append(b' ' + el)
1323 postout.append(b' ' + el)
1323
1324
1324 if lcmd:
1325 if lcmd:
1325 # Add on last return code.
1326 # Add on last return code.
1326 ret = int(lcmd.split()[1])
1327 ret = int(lcmd.split()[1])
1327 if ret != 0:
1328 if ret != 0:
1328 postout.append(b' [%d]\n' % ret)
1329 postout.append(b' [%d]\n' % ret)
1329 if pos in after:
1330 if pos in after:
1330 # Merge in non-active test bits.
1331 # Merge in non-active test bits.
1331 postout += after.pop(pos)
1332 postout += after.pop(pos)
1332 pos = int(lcmd.split()[0])
1333 pos = int(lcmd.split()[0])
1333
1334
1334 if pos in after:
1335 if pos in after:
1335 postout += after.pop(pos)
1336 postout += after.pop(pos)
1336
1337
1337 if warnonly == 2:
1338 if warnonly == 2:
1338 exitcode = False # Set exitcode to warned.
1339 exitcode = False # Set exitcode to warned.
1339
1340
1340 return exitcode, postout
1341 return exitcode, postout
1341
1342
1342 @staticmethod
1343 @staticmethod
1343 def rematch(el, l):
1344 def rematch(el, l):
1344 try:
1345 try:
1345 # use \Z to ensure that the regex matches to the end of the string
1346 # use \Z to ensure that the regex matches to the end of the string
1346 if os.name == 'nt':
1347 if os.name == 'nt':
1347 return re.match(el + br'\r?\n\Z', l)
1348 return re.match(el + br'\r?\n\Z', l)
1348 return re.match(el + br'\n\Z', l)
1349 return re.match(el + br'\n\Z', l)
1349 except re.error:
1350 except re.error:
1350 # el is an invalid regex
1351 # el is an invalid regex
1351 return False
1352 return False
1352
1353
1353 @staticmethod
1354 @staticmethod
1354 def globmatch(el, l):
1355 def globmatch(el, l):
1355 # The only supported special characters are * and ? plus / which also
1356 # The only supported special characters are * and ? plus / which also
1356 # matches \ on windows. Escaping of these characters is supported.
1357 # matches \ on windows. Escaping of these characters is supported.
1357 if el + b'\n' == l:
1358 if el + b'\n' == l:
1358 if os.altsep:
1359 if os.altsep:
1359 # matching on "/" is not needed for this line
1360 # matching on "/" is not needed for this line
1360 for pat in checkcodeglobpats:
1361 for pat in checkcodeglobpats:
1361 if pat.match(el):
1362 if pat.match(el):
1362 return True
1363 return True
1363 return b'-glob'
1364 return b'-glob'
1364 return True
1365 return True
1365 el = el.replace(b'$LOCALIP', b'*')
1366 el = el.replace(b'$LOCALIP', b'*')
1366 i, n = 0, len(el)
1367 i, n = 0, len(el)
1367 res = b''
1368 res = b''
1368 while i < n:
1369 while i < n:
1369 c = el[i:i + 1]
1370 c = el[i:i + 1]
1370 i += 1
1371 i += 1
1371 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1372 if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
1372 res += el[i - 1:i + 1]
1373 res += el[i - 1:i + 1]
1373 i += 1
1374 i += 1
1374 elif c == b'*':
1375 elif c == b'*':
1375 res += b'.*'
1376 res += b'.*'
1376 elif c == b'?':
1377 elif c == b'?':
1377 res += b'.'
1378 res += b'.'
1378 elif c == b'/' and os.altsep:
1379 elif c == b'/' and os.altsep:
1379 res += b'[/\\\\]'
1380 res += b'[/\\\\]'
1380 else:
1381 else:
1381 res += re.escape(c)
1382 res += re.escape(c)
1382 return TTest.rematch(res, l)
1383 return TTest.rematch(res, l)
1383
1384
1384 @staticmethod
1385 @staticmethod
1385 def linematch(el, l):
1386 def linematch(el, l):
1386 retry = False
1387 retry = False
1387 if el == l: # perfect match (fast)
1388 if el == l: # perfect match (fast)
1388 return True
1389 return True
1389 if el:
1390 if el:
1390 if el.endswith(b" (?)\n"):
1391 if el.endswith(b" (?)\n"):
1391 retry = "retry"
1392 retry = "retry"
1392 el = el[:-5] + b"\n"
1393 el = el[:-5] + b"\n"
1393 else:
1394 else:
1394 m = optline.match(el)
1395 m = optline.match(el)
1395 if m:
1396 if m:
1396 el = m.group(1) + b"\n"
1397 el = m.group(1) + b"\n"
1397 retry = "retry"
1398 retry = "retry"
1398
1399
1399 if el.endswith(b" (esc)\n"):
1400 if el.endswith(b" (esc)\n"):
1400 if PYTHON3:
1401 if PYTHON3:
1401 el = el[:-7].decode('unicode_escape') + '\n'
1402 el = el[:-7].decode('unicode_escape') + '\n'
1402 el = el.encode('utf-8')
1403 el = el.encode('utf-8')
1403 else:
1404 else:
1404 el = el[:-7].decode('string-escape') + '\n'
1405 el = el[:-7].decode('string-escape') + '\n'
1405 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
1406 if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
1406 return True
1407 return True
1407 if el.endswith(b" (re)\n"):
1408 if el.endswith(b" (re)\n"):
1408 return TTest.rematch(el[:-6], l) or retry
1409 return TTest.rematch(el[:-6], l) or retry
1409 if el.endswith(b" (glob)\n"):
1410 if el.endswith(b" (glob)\n"):
1410 # ignore '(glob)' added to l by 'replacements'
1411 # ignore '(glob)' added to l by 'replacements'
1411 if l.endswith(b" (glob)\n"):
1412 if l.endswith(b" (glob)\n"):
1412 l = l[:-8] + b"\n"
1413 l = l[:-8] + b"\n"
1413 return TTest.globmatch(el[:-8], l) or retry
1414 return TTest.globmatch(el[:-8], l) or retry
1414 if os.altsep and l.replace(b'\\', b'/') == el:
1415 if os.altsep and l.replace(b'\\', b'/') == el:
1415 return b'+glob'
1416 return b'+glob'
1416 return retry
1417 return retry
1417
1418
1418 @staticmethod
1419 @staticmethod
1419 def parsehghaveoutput(lines):
1420 def parsehghaveoutput(lines):
1420 '''Parse hghave log lines.
1421 '''Parse hghave log lines.
1421
1422
1422 Return tuple of lists (missing, failed):
1423 Return tuple of lists (missing, failed):
1423 * the missing/unknown features
1424 * the missing/unknown features
1424 * the features for which existence check failed'''
1425 * the features for which existence check failed'''
1425 missing = []
1426 missing = []
1426 failed = []
1427 failed = []
1427 for line in lines:
1428 for line in lines:
1428 if line.startswith(TTest.SKIPPED_PREFIX):
1429 if line.startswith(TTest.SKIPPED_PREFIX):
1429 line = line.splitlines()[0]
1430 line = line.splitlines()[0]
1430 missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
1431 missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
1431 elif line.startswith(TTest.FAILED_PREFIX):
1432 elif line.startswith(TTest.FAILED_PREFIX):
1432 line = line.splitlines()[0]
1433 line = line.splitlines()[0]
1433 failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
1434 failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))
1434
1435
1435 return missing, failed
1436 return missing, failed
1436
1437
1437 @staticmethod
1438 @staticmethod
1438 def _escapef(m):
1439 def _escapef(m):
1439 return TTest.ESCAPEMAP[m.group(0)]
1440 return TTest.ESCAPEMAP[m.group(0)]
1440
1441
1441 @staticmethod
1442 @staticmethod
1442 def _stringescape(s):
1443 def _stringescape(s):
1443 return TTest.ESCAPESUB(TTest._escapef, s)
1444 return TTest.ESCAPESUB(TTest._escapef, s)
1444
1445
1445 iolock = threading.RLock()
1446 iolock = threading.RLock()
1446
1447
1447 class SkipTest(Exception):
1448 class SkipTest(Exception):
1448 """Raised to indicate that a test is to be skipped."""
1449 """Raised to indicate that a test is to be skipped."""
1449
1450
1450 class IgnoreTest(Exception):
1451 class IgnoreTest(Exception):
1451 """Raised to indicate that a test is to be ignored."""
1452 """Raised to indicate that a test is to be ignored."""
1452
1453
1453 class WarnTest(Exception):
1454 class WarnTest(Exception):
1454 """Raised to indicate that a test warned."""
1455 """Raised to indicate that a test warned."""
1455
1456
1456 class ReportedTest(Exception):
1457 class ReportedTest(Exception):
1457 """Raised to indicate that a test already reported."""
1458 """Raised to indicate that a test already reported."""
1458
1459
1459 class TestResult(unittest._TextTestResult):
1460 class TestResult(unittest._TextTestResult):
1460 """Holds results when executing via unittest."""
1461 """Holds results when executing via unittest."""
1461 # Don't worry too much about accessing the non-public _TextTestResult.
1462 # Don't worry too much about accessing the non-public _TextTestResult.
1462 # It is relatively common in Python testing tools.
1463 # It is relatively common in Python testing tools.
1463 def __init__(self, options, *args, **kwargs):
1464 def __init__(self, options, *args, **kwargs):
1464 super(TestResult, self).__init__(*args, **kwargs)
1465 super(TestResult, self).__init__(*args, **kwargs)
1465
1466
1466 self._options = options
1467 self._options = options
1467
1468
1468 # unittest.TestResult didn't have skipped until 2.7. We need to
1469 # unittest.TestResult didn't have skipped until 2.7. We need to
1469 # polyfill it.
1470 # polyfill it.
1470 self.skipped = []
1471 self.skipped = []
1471
1472
1472 # We have a custom "ignored" result that isn't present in any Python
1473 # We have a custom "ignored" result that isn't present in any Python
1473 # unittest implementation. It is very similar to skipped. It may make
1474 # unittest implementation. It is very similar to skipped. It may make
1474 # sense to map it into skip some day.
1475 # sense to map it into skip some day.
1475 self.ignored = []
1476 self.ignored = []
1476
1477
1477 # We have a custom "warned" result that isn't present in any Python
1478 # We have a custom "warned" result that isn't present in any Python
1478 # unittest implementation. It is very similar to failed. It may make
1479 # unittest implementation. It is very similar to failed. It may make
1479 # sense to map it into fail some day.
1480 # sense to map it into fail some day.
1480 self.warned = []
1481 self.warned = []
1481
1482
1482 self.times = []
1483 self.times = []
1483 self._firststarttime = None
1484 self._firststarttime = None
1484 # Data stored for the benefit of generating xunit reports.
1485 # Data stored for the benefit of generating xunit reports.
1485 self.successes = []
1486 self.successes = []
1486 self.faildata = {}
1487 self.faildata = {}
1487
1488
1488 def addFailure(self, test, reason):
1489 def addFailure(self, test, reason):
1489 self.failures.append((test, reason))
1490 self.failures.append((test, reason))
1490
1491
1491 if self._options.first:
1492 if self._options.first:
1492 self.stop()
1493 self.stop()
1493 else:
1494 else:
1494 with iolock:
1495 with iolock:
1495 if reason == "timed out":
1496 if reason == "timed out":
1496 self.stream.write('t')
1497 self.stream.write('t')
1497 else:
1498 else:
1498 if not self._options.nodiff:
1499 if not self._options.nodiff:
1499 self.stream.write('\nERROR: %s output changed\n' % test)
1500 self.stream.write('\nERROR: %s output changed\n' % test)
1500 self.stream.write('!')
1501 self.stream.write('!')
1501
1502
1502 self.stream.flush()
1503 self.stream.flush()
1503
1504
def addSuccess(self, test):
    """Record a passing test.

    The unittest bookkeeping happens under the global iolock; the
    custom successes list (used for xunit/json reports) is appended
    afterwards.
    """
    with iolock:
        super(TestResult, self).addSuccess(test)
    self.successes.append(test)
1508
1509
def addError(self, test, err):
    """Record a test that errored; honor --first by stopping the run."""
    super(TestResult, self).addError(test, err)
    if self._options.first:
        self.stop()
1513
1514
# Polyfill.
def addSkip(self, test, reason):
    """Record a skipped test and emit 's' (or the reason when verbose)."""
    self.skipped.append((test, reason))
    with iolock:
        if self.showAll:
            self.stream.writeln('skipped %s' % reason)
        else:
            self.stream.write('s')
            self.stream.flush()
1523
1524
def addIgnore(self, test, reason):
    """Record an ignored test and emit progress output.

    Ignores caused by --retest / keyword filtering are silent, but the
    test is still counted as having run.
    """
    self.ignored.append((test, reason))
    with iolock:
        if self.showAll:
            self.stream.writeln('ignored %s' % reason)
        else:
            if reason in ('not retesting', "doesn't match keyword"):
                # suppress the progress mark for bulk-filter reasons
                self.testsRun += 1
            else:
                self.stream.write('i')
            self.stream.flush()
1535
1536
def addWarn(self, test, reason):
    """Record a warned test (custom result kind, similar to failed).

    Note that unlike addFailure, output is still emitted even when
    --first stops the run.
    """
    self.warned.append((test, reason))

    if self._options.first:
        self.stop()

    with iolock:
        if self.showAll:
            self.stream.writeln('warned %s' % reason)
        else:
            self.stream.write('~')
            self.stream.flush()
1548
1549
def addOutputMismatch(self, test, ret, got, expected):
    """Record a mismatch in test output for a particular test.

    Returns True when the user interactively accepted the new output
    (the .err file is promoted to the reference file), False otherwise.
    """
    if self.shouldStop:
        # don't print, some other test case already failed and
        # printed, we're just stale and probably failed due to our
        # temp dir getting cleaned up.
        return

    accepted = False
    difflines = []

    with iolock:
        if self._options.nodiff:
            pass
        elif self._options.view:
            # launch the user's external diff viewer instead of printing
            viewer = self._options.view
            if PYTHON3:
                viewer = _bytespath(viewer)
            os.system(b"%s %s %s" %
                      (viewer, test.refpath, test.errpath))
        else:
            server_failed, difflines = getdiff(expected, got,
                                               test.refpath, test.errpath)
            if server_failed:
                self.addFailure(
                    test,
                    'server failed to start (HGPORT=%s)' % test._startport)
                raise ReportedTest('server failed to start')
            self.stream.write('\n')
            for line in difflines:
                if PYTHON3:
                    self.stream.flush()
                    self.stream.buffer.write(line)
                    self.stream.buffer.flush()
                else:
                    self.stream.write(line)
                    self.stream.flush()

        # handle interactive prompt without releasing iolock
        if self._options.interactive:
            self.stream.write('Accept this change? [n] ')
            reply = sys.stdin.readline().strip()
            if reply.lower() in ('y', 'yes'):
                if test.name.endswith('.t'):
                    rename(test.errpath, test.path)
                else:
                    rename(test.errpath, '%s.out' % test.path)
                accepted = True
        if not accepted:
            self.faildata[test.name] = b''.join(difflines)

    return accepted
1602
1603
def startTest(self, test):
    """Note the start time of *test* for later timing reports."""
    super(TestResult, self).startTest(test)

    # os.times() reports user/system CPU time of child processes along
    # with the elapsed real time; note that this only works reliably on
    # POSIX systems, not on Windows.
    test.started = os.times()
    if self._firststarttime is None: # thread racy but irrelevant
        self._firststarttime = test.started[4]
1613
1614
def stopTest(self, test, interrupted=False):
    """Record the end of *test* and accumulate its timing data."""
    super(TestResult, self).stopTest(test)

    test.stopped = os.times()

    t0 = test.started
    t1 = test.stopped
    origin = self._firststarttime
    self.times.append((test.name,
                       t1[2] - t0[2], # user space CPU time
                       t1[3] - t0[3], # sys space CPU time
                       t1[4] - t0[4], # real time
                       t0[4] - origin, # start date in run context
                       t1[4] - origin, # end date in run context
                      ))

    if interrupted:
        with iolock:
            self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
                test.name, self.times[-1][3]))
1634
1635
class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
                 retest=False, keywords=None, loop=False, runs_per_test=1,
                 loadtest=None, showchannels=False,
                 *args, **kwargs):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        retest denotes whether to retest failed tests. This arguably belongs
        outside of TestSuite.

        keywords denotes key words that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # We have a number of filters that need to be applied. We do this
        # here instead of inside Test because it makes the running logic for
        # Test simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:
            def get():
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test.path, num_tests[0])
                return test
            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            if not (self._whitelist and test.name in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    # use a context manager so the handle is closed even if
                    # the read raises (the original leaked it in that case)
                    with open(test.path, 'rb') as f:
                        t = f.read().lower() + test.bname.lower()
                    ignored = False
                    for k in self._keywords.lower().split():
                        if k not in t:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        channels = [""] * self._jobs

        def job(test, result):
            # run a single test on a free channel slot, reporting completion
            # (or a crash of the harness itself) through the done queue
            for n, v in enumerate(channels):
                if not v:
                    channel = n
                    break
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except: # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            try:
                channels[channel] = ''
            except IndexError:
                pass

        def stat():
            # periodically display per-channel progress (--showchannels)
            count = 0
            while channels:
                d = '\n%03s  ' % count
                for n, v in enumerate(channels):
                    if v:
                        d += v[0]
                        channels[n] = v[1:] or '.'
                    else:
                        d += ' '
                    d += ' '
                with iolock:
                    sys.stdout.write(d + '  ')
                    sys.stdout.flush()
                for x in xrange(10):
                    if channels:
                        time.sleep(.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(
                                self._loadtest(test.name, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        t = threading.Thread(target=job, name=test.name,
                                             args=(test, result))
                        t.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        channels = []

        return result
1818
1819
# Save the most recent 5 wall-clock runtimes of each test to a
# human-readable text file named .testtimes. Tests are sorted
# alphabetically, while times for each test are listed from oldest to
# newest.
def loadtimes(testdir):
    """Load per-test timing data previously written by savetimes().

    Returns a list of (testname, [realtime, ...]) pairs. A missing file
    yields an empty list; any other IOError propagates.
    """
    times = []
    try:
        # Read the same file that savetimes() writes. The previous name,
        # b'.testtimes-' (trailing dash), never matched the written file,
        # so saved timings were silently ignored on reload.
        with open(os.path.join(testdir, b'.testtimes')) as fp:
            for line in fp:
                ts = line.split()
                times.append((ts[0], [float(t) for t in ts[1:]]))
    except IOError as err:
        if err.errno != errno.ENOENT:
            raise
    return times
1835
1836
def savetimes(testdir, result):
    """Merge this run's wall-clock times into testdir's .testtimes file.

    Keeps at most the five most recent real-time measurements per test,
    ignoring tests that were skipped this run. The file is replaced via
    a temporary file; unlink/rename failures are deliberately ignored
    (timing data is best-effort).
    """
    saved = dict(loadtimes(testdir))
    maxruns = 5
    skipped = {str(t[0]) for t in result.skipped}
    for tdata in result.times:
        test, real = tdata[0], tdata[3]
        if test in skipped:
            continue
        ts = saved.setdefault(test, [])
        ts.append(real)
        ts[:] = ts[-maxruns:]

    fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
                                   dir=testdir, text=True)
    with os.fdopen(fd, 'w') as fp:
        for name, ts in sorted(saved.items()):
            fp.write('%s %s\n' % (name, ' '.join(['%.3f' % (t,) for t in ts])))
    timepath = os.path.join(testdir, b'.testtimes')
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass
1861
1862
class TextTestRunner(unittest.TextTestRunner):
    """Custom unittest test runner that uses appropriate settings."""

    def __init__(self, runner, *args, **kwargs):
        super(TextTestRunner, self).__init__(*args, **kwargs)

        # the owning TestRunner; provides options, _testdir, _checkhglib
        self._runner = runner

    def run(self, test):
        """Run *test*, print the summary, and emit optional xunit/json
        reports. Returns the populated TestResult."""
        result = TestResult(self._runner.options, self.stream,
                            self.descriptions, self.verbosity)

        test(result)

        failed = len(result.failures)
        warned = len(result.warned)
        skipped = len(result.skipped)
        ignored = len(result.ignored)

        with iolock:
            self.stream.writeln('')

            if not self._runner.options.noskips:
                for test, msg in result.skipped:
                    self.stream.writeln('Skipped %s: %s' % (test.name, msg))
            for test, msg in result.warned:
                self.stream.writeln('Warned %s: %s' % (test.name, msg))
            for test, msg in result.failures:
                self.stream.writeln('Failed %s: %s' % (test.name, msg))
            for test, msg in result.errors:
                self.stream.writeln('Errored %s: %s' % (test.name, msg))

            if self._runner.options.xunit:
                with open(self._runner.options.xunit, 'wb') as xuf:
                    timesd = dict((t[0], t[3]) for t in result.times)
                    doc = minidom.Document()
                    s = doc.createElement('testsuite')
                    s.setAttribute('name', 'run-tests')
                    s.setAttribute('tests', str(result.testsRun))
                    s.setAttribute('errors', "0") # TODO
                    s.setAttribute('failures', str(failed))
                    s.setAttribute('skipped', str(skipped + ignored))
                    doc.appendChild(s)
                    for tc in result.successes:
                        t = doc.createElement('testcase')
                        t.setAttribute('name', tc.name)
                        t.setAttribute('time', '%.3f' % timesd[tc.name])
                        s.appendChild(t)
                    for tc, err in sorted(result.faildata.items()):
                        t = doc.createElement('testcase')
                        t.setAttribute('name', tc)
                        t.setAttribute('time', '%.3f' % timesd[tc])
                        # createCDATASection expects a unicode or it will
                        # convert using default conversion rules, which will
                        # fail if string isn't ASCII.
                        err = cdatasafe(err).decode('utf-8', 'replace')
                        cd = doc.createCDATASection(err)
                        t.appendChild(cd)
                        s.appendChild(t)
                    xuf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))

            if self._runner.options.json:
                jsonpath = os.path.join(self._runner._testdir, b'report.json')
                with open(jsonpath, 'w') as fp:
                    timesd = {}
                    for tdata in result.times:
                        test = tdata[0]
                        timesd[test] = tdata[1:]

                    outcome = {}
                    groups = [('success', ((tc, None)
                               for tc in result.successes)),
                              ('failure', result.failures),
                              ('skip', result.skipped)]
                    for res, testcases in groups:
                        for tc, __ in testcases:
                            if tc.name in timesd:
                                diff = result.faildata.get(tc.name, b'')
                                tres = {'result': res,
                                        'time': ('%0.3f' % timesd[tc.name][2]),
                                        'cuser': ('%0.3f' % timesd[tc.name][0]),
                                        'csys': ('%0.3f' % timesd[tc.name][1]),
                                        'start': ('%0.3f' % timesd[tc.name][3]),
                                        'end': ('%0.3f' % timesd[tc.name][4]),
                                        'diff': diff.decode('unicode_escape'),
                                        }
                            else:
                                # blacklisted test
                                tres = {'result': res}

                            outcome[tc.name] = tres
                    jsonout = json.dumps(outcome, sort_keys=True, indent=4,
                                         separators=(',', ': '))
                    fp.writelines(("testreport =", jsonout))

            self._runner._checkhglib('Tested')

            savetimes(self._runner._testdir, result)

            if failed and self._runner.options.known_good_rev:
                def nooutput(args):
                    # run a command, discarding all of its output
                    p = subprocess.Popen(args, stderr=subprocess.STDOUT,
                                         stdout=subprocess.PIPE)
                    p.stdout.read()
                    p.wait()
                for test, msg in result.failures:
                    # bisect between the known-good revision and '.' to
                    # find the changeset that broke (or fixed) this test.
                    # (dropped a stray trailing comma that made the first
                    # call a pointless 1-tuple expression)
                    nooutput(['hg', 'bisect', '--reset'])
                    nooutput(['hg', 'bisect', '--bad', '.'])
                    nooutput(['hg', 'bisect', '--good',
                              self._runner.options.known_good_rev])
                    # TODO: we probably need to forward some options
                    # that alter hg's behavior inside the tests.
                    rtc = '%s %s %s' % (sys.executable, sys.argv[0], test)
                    sub = subprocess.Popen(['hg', 'bisect', '--command', rtc],
                                           stderr=subprocess.STDOUT,
                                           stdout=subprocess.PIPE)
                    data = sub.stdout.read()
                    sub.wait()
                    m = re.search(
                        (r'\nThe first (?P<goodbad>bad|good) revision '
                         r'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
                         r'summary: +(?P<summary>[^\n]+)\n'),
                        data, (re.MULTILINE | re.DOTALL))
                    if m is None:
                        self.stream.writeln(
                            'Failed to identify failure point for %s' % test)
                        continue
                    dat = m.groupdict()
                    verb = 'broken' if dat['goodbad'] == 'bad' else 'fixed'
                    self.stream.writeln(
                        '%s %s by %s (%s)' % (
                            test, verb, dat['node'], dat['summary']))
            self.stream.writeln(
                '# Ran %d tests, %d skipped, %d warned, %d failed.'
                % (result.testsRun,
                   skipped + ignored, warned, failed))
            if failed:
                self.stream.writeln('python hash seed: %s' %
                                    os.environ['PYTHONHASHSEED'])
            if self._runner.options.time:
                self.printtimes(result.times)

        return result

    def printtimes(self, times):
        """Print the per-test timing table (used for --time)."""
        # iolock held by run
        self.stream.writeln('# Producing time report')
        times.sort(key=lambda t: (t[3]))
        cols = '%7.3f %7.3f %7.3f %7.3f %7.3f   %s'
        self.stream.writeln('%-7s %-7s %-7s %-7s %-7s   %s' %
                            ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
        for tdata in times:
            test = tdata[0]
            cuser, csys, real, start, end = tdata[1:6]
            self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2017
2018
class TestRunner(object):
    """Holds context for executing tests.

    Tests rely on a lot of state. This object holds it for them.
    """

    # Programs required to run tests.
    REQUIREDTOOLS = [
        os.path.basename(_bytespath(sys.executable)),
        b'diff',
        b'grep',
        b'unzip',
        b'gunzip',
        b'bunzip2',
        b'sed',
    ]

    # Maps file extensions to test class.
    TESTTYPES = [
        (b'.py', PythonTest),
        (b'.t', TTest),
    ]
2040
2041
2041 def __init__(self):
2042 def __init__(self):
2042 self.options = None
2043 self.options = None
2043 self._hgroot = None
2044 self._hgroot = None
2044 self._testdir = None
2045 self._testdir = None
2045 self._hgtmp = None
2046 self._hgtmp = None
2046 self._installdir = None
2047 self._installdir = None
2047 self._bindir = None
2048 self._bindir = None
2048 self._tmpbinddir = None
2049 self._tmpbinddir = None
2049 self._pythondir = None
2050 self._pythondir = None
2050 self._coveragefile = None
2051 self._coveragefile = None
2051 self._createdfiles = []
2052 self._createdfiles = []
2052 self._hgcommand = None
2053 self._hgcommand = None
2053 self._hgpath = None
2054 self._hgpath = None
2054 self._portoffset = 0
2055 self._portoffset = 0
2055 self._ports = {}
2056 self._ports = {}
2056
2057
2057 def run(self, args, parser=None):
2058 def run(self, args, parser=None):
2058 """Run the test suite."""
2059 """Run the test suite."""
2059 oldmask = os.umask(0o22)
2060 oldmask = os.umask(0o22)
2060 try:
2061 try:
2061 parser = parser or getparser()
2062 parser = parser or getparser()
2062 options, args = parseargs(args, parser)
2063 options, args = parseargs(args, parser)
2063 # positional arguments are paths to test files to run, so
2064 # positional arguments are paths to test files to run, so
2064 # we make sure they're all bytestrings
2065 # we make sure they're all bytestrings
2065 args = [_bytespath(a) for a in args]
2066 args = [_bytespath(a) for a in args]
2066 self.options = options
2067 self.options = options
2067
2068
2068 self._checktools()
2069 self._checktools()
2069 tests = self.findtests(args)
2070 tests = self.findtests(args)
2070 if options.profile_runner:
2071 if options.profile_runner:
2071 import statprof
2072 import statprof
2072 statprof.start()
2073 statprof.start()
2073 result = self._run(tests)
2074 result = self._run(tests)
2074 if options.profile_runner:
2075 if options.profile_runner:
2075 statprof.stop()
2076 statprof.stop()
2076 statprof.display()
2077 statprof.display()
2077 return result
2078 return result
2078
2079
2079 finally:
2080 finally:
2080 os.umask(oldmask)
2081 os.umask(oldmask)
2081
2082
2082 def _run(self, tests):
2083 def _run(self, tests):
2083 if self.options.random:
2084 if self.options.random:
2084 random.shuffle(tests)
2085 random.shuffle(tests)
2085 else:
2086 else:
2086 # keywords for slow tests
2087 # keywords for slow tests
2087 slow = {b'svn': 10,
2088 slow = {b'svn': 10,
2088 b'cvs': 10,
2089 b'cvs': 10,
2089 b'hghave': 10,
2090 b'hghave': 10,
2090 b'largefiles-update': 10,
2091 b'largefiles-update': 10,
2091 b'run-tests': 10,
2092 b'run-tests': 10,
2092 b'corruption': 10,
2093 b'corruption': 10,
2093 b'race': 10,
2094 b'race': 10,
2094 b'i18n': 10,
2095 b'i18n': 10,
2095 b'check': 100,
2096 b'check': 100,
2096 b'gendoc': 100,
2097 b'gendoc': 100,
2097 b'contrib-perf': 200,
2098 b'contrib-perf': 200,
2098 }
2099 }
2099 perf = {}
2100 perf = {}
2100 def sortkey(f):
2101 def sortkey(f):
2101 # run largest tests first, as they tend to take the longest
2102 # run largest tests first, as they tend to take the longest
2102 try:
2103 try:
2103 return perf[f]
2104 return perf[f]
2104 except KeyError:
2105 except KeyError:
2105 try:
2106 try:
2106 val = -os.stat(f).st_size
2107 val = -os.stat(f).st_size
2107 except OSError as e:
2108 except OSError as e:
2108 if e.errno != errno.ENOENT:
2109 if e.errno != errno.ENOENT:
2109 raise
2110 raise
2110 perf[f] = -1e9 # file does not exist, tell early
2111 perf[f] = -1e9 # file does not exist, tell early
2111 return -1e9
2112 return -1e9
2112 for kw, mul in slow.items():
2113 for kw, mul in slow.items():
2113 if kw in f:
2114 if kw in f:
2114 val *= mul
2115 val *= mul
2115 if f.endswith(b'.py'):
2116 if f.endswith(b'.py'):
2116 val /= 10.0
2117 val /= 10.0
2117 perf[f] = val / 1000.0
2118 perf[f] = val / 1000.0
2118 return perf[f]
2119 return perf[f]
2119 tests.sort(key=sortkey)
2120 tests.sort(key=sortkey)
2120
2121
2121 self._testdir = osenvironb[b'TESTDIR'] = getattr(
2122 self._testdir = osenvironb[b'TESTDIR'] = getattr(
2122 os, 'getcwdb', os.getcwd)()
2123 os, 'getcwdb', os.getcwd)()
2123
2124
2124 if 'PYTHONHASHSEED' not in os.environ:
2125 if 'PYTHONHASHSEED' not in os.environ:
2125 # use a random python hash seed all the time
2126 # use a random python hash seed all the time
2126 # we do the randomness ourself to know what seed is used
2127 # we do the randomness ourself to know what seed is used
2127 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2128 os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))
2128
2129
2129 if self.options.tmpdir:
2130 if self.options.tmpdir:
2130 self.options.keep_tmpdir = True
2131 self.options.keep_tmpdir = True
2131 tmpdir = _bytespath(self.options.tmpdir)
2132 tmpdir = _bytespath(self.options.tmpdir)
2132 if os.path.exists(tmpdir):
2133 if os.path.exists(tmpdir):
2133 # Meaning of tmpdir has changed since 1.3: we used to create
2134 # Meaning of tmpdir has changed since 1.3: we used to create
2134 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2135 # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
2135 # tmpdir already exists.
2136 # tmpdir already exists.
2136 print("error: temp dir %r already exists" % tmpdir)
2137 print("error: temp dir %r already exists" % tmpdir)
2137 return 1
2138 return 1
2138
2139
2139 # Automatically removing tmpdir sounds convenient, but could
2140 # Automatically removing tmpdir sounds convenient, but could
2140 # really annoy anyone in the habit of using "--tmpdir=/tmp"
2141 # really annoy anyone in the habit of using "--tmpdir=/tmp"
2141 # or "--tmpdir=$HOME".
2142 # or "--tmpdir=$HOME".
2142 #vlog("# Removing temp dir", tmpdir)
2143 #vlog("# Removing temp dir", tmpdir)
2143 #shutil.rmtree(tmpdir)
2144 #shutil.rmtree(tmpdir)
2144 os.makedirs(tmpdir)
2145 os.makedirs(tmpdir)
2145 else:
2146 else:
2146 d = None
2147 d = None
2147 if os.name == 'nt':
2148 if os.name == 'nt':
2148 # without this, we get the default temp dir location, but
2149 # without this, we get the default temp dir location, but
2149 # in all lowercase, which causes troubles with paths (issue3490)
2150 # in all lowercase, which causes troubles with paths (issue3490)
2150 d = osenvironb.get(b'TMP', None)
2151 d = osenvironb.get(b'TMP', None)
2151 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
2152 tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)
2152
2153
2153 self._hgtmp = osenvironb[b'HGTMP'] = (
2154 self._hgtmp = osenvironb[b'HGTMP'] = (
2154 os.path.realpath(tmpdir))
2155 os.path.realpath(tmpdir))
2155
2156
2156 if self.options.with_hg:
2157 if self.options.with_hg:
2157 self._installdir = None
2158 self._installdir = None
2158 whg = self.options.with_hg
2159 whg = self.options.with_hg
2159 self._bindir = os.path.dirname(os.path.realpath(whg))
2160 self._bindir = os.path.dirname(os.path.realpath(whg))
2160 assert isinstance(self._bindir, bytes)
2161 assert isinstance(self._bindir, bytes)
2161 self._hgcommand = os.path.basename(whg)
2162 self._hgcommand = os.path.basename(whg)
2162 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
2163 self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
2163 os.makedirs(self._tmpbindir)
2164 os.makedirs(self._tmpbindir)
2164
2165
2165 # This looks redundant with how Python initializes sys.path from
2166 # This looks redundant with how Python initializes sys.path from
2166 # the location of the script being executed. Needed because the
2167 # the location of the script being executed. Needed because the
2167 # "hg" specified by --with-hg is not the only Python script
2168 # "hg" specified by --with-hg is not the only Python script
2168 # executed in the test suite that needs to import 'mercurial'
2169 # executed in the test suite that needs to import 'mercurial'
2169 # ... which means it's not really redundant at all.
2170 # ... which means it's not really redundant at all.
2170 self._pythondir = self._bindir
2171 self._pythondir = self._bindir
2171 else:
2172 else:
2172 self._installdir = os.path.join(self._hgtmp, b"install")
2173 self._installdir = os.path.join(self._hgtmp, b"install")
2173 self._bindir = os.path.join(self._installdir, b"bin")
2174 self._bindir = os.path.join(self._installdir, b"bin")
2174 self._hgcommand = b'hg'
2175 self._hgcommand = b'hg'
2175 self._tmpbindir = self._bindir
2176 self._tmpbindir = self._bindir
2176 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
2177 self._pythondir = os.path.join(self._installdir, b"lib", b"python")
2177
2178
2178 # set CHGHG, then replace "hg" command by "chg"
2179 # set CHGHG, then replace "hg" command by "chg"
2179 chgbindir = self._bindir
2180 chgbindir = self._bindir
2180 if self.options.chg or self.options.with_chg:
2181 if self.options.chg or self.options.with_chg:
2181 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
2182 osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
2182 else:
2183 else:
2183 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
2184 osenvironb.pop(b'CHGHG', None) # drop flag for hghave
2184 if self.options.chg:
2185 if self.options.chg:
2185 self._hgcommand = b'chg'
2186 self._hgcommand = b'chg'
2186 elif self.options.with_chg:
2187 elif self.options.with_chg:
2187 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
2188 chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
2188 self._hgcommand = os.path.basename(self.options.with_chg)
2189 self._hgcommand = os.path.basename(self.options.with_chg)
2189
2190
2190 osenvironb[b"BINDIR"] = self._bindir
2191 osenvironb[b"BINDIR"] = self._bindir
2191 osenvironb[b"PYTHON"] = PYTHON
2192 osenvironb[b"PYTHON"] = PYTHON
2192
2193
2193 if self.options.with_python3:
2194 if self.options.with_python3:
2194 osenvironb[b'PYTHON3'] = self.options.with_python3
2195 osenvironb[b'PYTHON3'] = self.options.with_python3
2195
2196
2196 fileb = _bytespath(__file__)
2197 fileb = _bytespath(__file__)
2197 runtestdir = os.path.abspath(os.path.dirname(fileb))
2198 runtestdir = os.path.abspath(os.path.dirname(fileb))
2198 osenvironb[b'RUNTESTDIR'] = runtestdir
2199 osenvironb[b'RUNTESTDIR'] = runtestdir
2199 if PYTHON3:
2200 if PYTHON3:
2200 sepb = _bytespath(os.pathsep)
2201 sepb = _bytespath(os.pathsep)
2201 else:
2202 else:
2202 sepb = os.pathsep
2203 sepb = os.pathsep
2203 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
2204 path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
2204 if os.path.islink(__file__):
2205 if os.path.islink(__file__):
2205 # test helper will likely be at the end of the symlink
2206 # test helper will likely be at the end of the symlink
2206 realfile = os.path.realpath(fileb)
2207 realfile = os.path.realpath(fileb)
2207 realdir = os.path.abspath(os.path.dirname(realfile))
2208 realdir = os.path.abspath(os.path.dirname(realfile))
2208 path.insert(2, realdir)
2209 path.insert(2, realdir)
2209 if chgbindir != self._bindir:
2210 if chgbindir != self._bindir:
2210 path.insert(1, chgbindir)
2211 path.insert(1, chgbindir)
2211 if self._testdir != runtestdir:
2212 if self._testdir != runtestdir:
2212 path = [self._testdir] + path
2213 path = [self._testdir] + path
2213 if self._tmpbindir != self._bindir:
2214 if self._tmpbindir != self._bindir:
2214 path = [self._tmpbindir] + path
2215 path = [self._tmpbindir] + path
2215 osenvironb[b"PATH"] = sepb.join(path)
2216 osenvironb[b"PATH"] = sepb.join(path)
2216
2217
2217 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
2218 # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
2218 # can run .../tests/run-tests.py test-foo where test-foo
2219 # can run .../tests/run-tests.py test-foo where test-foo
2219 # adds an extension to HGRC. Also include run-test.py directory to
2220 # adds an extension to HGRC. Also include run-test.py directory to
2220 # import modules like heredoctest.
2221 # import modules like heredoctest.
2221 pypath = [self._pythondir, self._testdir, runtestdir]
2222 pypath = [self._pythondir, self._testdir, runtestdir]
2222 # We have to augment PYTHONPATH, rather than simply replacing
2223 # We have to augment PYTHONPATH, rather than simply replacing
2223 # it, in case external libraries are only available via current
2224 # it, in case external libraries are only available via current
2224 # PYTHONPATH. (In particular, the Subversion bindings on OS X
2225 # PYTHONPATH. (In particular, the Subversion bindings on OS X
2225 # are in /opt/subversion.)
2226 # are in /opt/subversion.)
2226 oldpypath = osenvironb.get(IMPL_PATH)
2227 oldpypath = osenvironb.get(IMPL_PATH)
2227 if oldpypath:
2228 if oldpypath:
2228 pypath.append(oldpypath)
2229 pypath.append(oldpypath)
2229 osenvironb[IMPL_PATH] = sepb.join(pypath)
2230 osenvironb[IMPL_PATH] = sepb.join(pypath)
2230
2231
2231 if self.options.pure:
2232 if self.options.pure:
2232 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
2233 os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
2233 os.environ["HGMODULEPOLICY"] = "py"
2234 os.environ["HGMODULEPOLICY"] = "py"
2234
2235
2235 if self.options.allow_slow_tests:
2236 if self.options.allow_slow_tests:
2236 os.environ["HGTEST_SLOW"] = "slow"
2237 os.environ["HGTEST_SLOW"] = "slow"
2237 elif 'HGTEST_SLOW' in os.environ:
2238 elif 'HGTEST_SLOW' in os.environ:
2238 del os.environ['HGTEST_SLOW']
2239 del os.environ['HGTEST_SLOW']
2239
2240
2240 self._coveragefile = os.path.join(self._testdir, b'.coverage')
2241 self._coveragefile = os.path.join(self._testdir, b'.coverage')
2241
2242
2242 vlog("# Using TESTDIR", self._testdir)
2243 vlog("# Using TESTDIR", self._testdir)
2243 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
2244 vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
2244 vlog("# Using HGTMP", self._hgtmp)
2245 vlog("# Using HGTMP", self._hgtmp)
2245 vlog("# Using PATH", os.environ["PATH"])
2246 vlog("# Using PATH", os.environ["PATH"])
2246 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
2247 vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])
2247
2248
2248 try:
2249 try:
2249 return self._runtests(tests) or 0
2250 return self._runtests(tests) or 0
2250 finally:
2251 finally:
2251 time.sleep(.1)
2252 time.sleep(.1)
2252 self._cleanup()
2253 self._cleanup()
2253
2254
2254 def findtests(self, args):
2255 def findtests(self, args):
2255 """Finds possible test files from arguments.
2256 """Finds possible test files from arguments.
2256
2257
2257 If you wish to inject custom tests into the test harness, this would
2258 If you wish to inject custom tests into the test harness, this would
2258 be a good function to monkeypatch or override in a derived class.
2259 be a good function to monkeypatch or override in a derived class.
2259 """
2260 """
2260 if not args:
2261 if not args:
2261 if self.options.changed:
2262 if self.options.changed:
2262 proc = Popen4('hg st --rev "%s" -man0 .' %
2263 proc = Popen4('hg st --rev "%s" -man0 .' %
2263 self.options.changed, None, 0)
2264 self.options.changed, None, 0)
2264 stdout, stderr = proc.communicate()
2265 stdout, stderr = proc.communicate()
2265 args = stdout.strip(b'\0').split(b'\0')
2266 args = stdout.strip(b'\0').split(b'\0')
2266 else:
2267 else:
2267 args = os.listdir(b'.')
2268 args = os.listdir(b'.')
2268
2269
2269 return [t for t in args
2270 return [t for t in args
2270 if os.path.basename(t).startswith(b'test-')
2271 if os.path.basename(t).startswith(b'test-')
2271 and (t.endswith(b'.py') or t.endswith(b'.t'))]
2272 and (t.endswith(b'.py') or t.endswith(b'.t'))]
2272
2273
2273 def _runtests(self, tests):
2274 def _runtests(self, tests):
2274 try:
2275 try:
2275 if self._installdir:
2276 if self._installdir:
2276 self._installhg()
2277 self._installhg()
2277 self._checkhglib("Testing")
2278 self._checkhglib("Testing")
2278 else:
2279 else:
2279 self._usecorrectpython()
2280 self._usecorrectpython()
2280 if self.options.chg:
2281 if self.options.chg:
2281 assert self._installdir
2282 assert self._installdir
2282 self._installchg()
2283 self._installchg()
2283
2284
2284 if self.options.restart:
2285 if self.options.restart:
2285 orig = list(tests)
2286 orig = list(tests)
2286 while tests:
2287 while tests:
2287 if os.path.exists(tests[0] + ".err"):
2288 if os.path.exists(tests[0] + ".err"):
2288 break
2289 break
2289 tests.pop(0)
2290 tests.pop(0)
2290 if not tests:
2291 if not tests:
2291 print("running all tests")
2292 print("running all tests")
2292 tests = orig
2293 tests = orig
2293
2294
2294 tests = [self._gettest(t, i) for i, t in enumerate(tests)]
2295 tests = [self._gettest(t, i) for i, t in enumerate(tests)]
2295
2296
2296 failed = False
2297 failed = False
2297 warned = False
2298 warned = False
2298 kws = self.options.keywords
2299 kws = self.options.keywords
2299 if kws is not None and PYTHON3:
2300 if kws is not None and PYTHON3:
2300 kws = kws.encode('utf-8')
2301 kws = kws.encode('utf-8')
2301
2302
2302 suite = TestSuite(self._testdir,
2303 suite = TestSuite(self._testdir,
2303 jobs=self.options.jobs,
2304 jobs=self.options.jobs,
2304 whitelist=self.options.whitelisted,
2305 whitelist=self.options.whitelisted,
2305 blacklist=self.options.blacklist,
2306 blacklist=self.options.blacklist,
2306 retest=self.options.retest,
2307 retest=self.options.retest,
2307 keywords=kws,
2308 keywords=kws,
2308 loop=self.options.loop,
2309 loop=self.options.loop,
2309 runs_per_test=self.options.runs_per_test,
2310 runs_per_test=self.options.runs_per_test,
2310 showchannels=self.options.showchannels,
2311 showchannels=self.options.showchannels,
2311 tests=tests, loadtest=self._gettest)
2312 tests=tests, loadtest=self._gettest)
2312 verbosity = 1
2313 verbosity = 1
2313 if self.options.verbose:
2314 if self.options.verbose:
2314 verbosity = 2
2315 verbosity = 2
2315 runner = TextTestRunner(self, verbosity=verbosity)
2316 runner = TextTestRunner(self, verbosity=verbosity)
2316 result = runner.run(suite)
2317 result = runner.run(suite)
2317
2318
2318 if result.failures:
2319 if result.failures:
2319 failed = True
2320 failed = True
2320 if result.warned:
2321 if result.warned:
2321 warned = True
2322 warned = True
2322
2323
2323 if self.options.anycoverage:
2324 if self.options.anycoverage:
2324 self._outputcoverage()
2325 self._outputcoverage()
2325 except KeyboardInterrupt:
2326 except KeyboardInterrupt:
2326 failed = True
2327 failed = True
2327 print("\ninterrupted!")
2328 print("\ninterrupted!")
2328
2329
2329 if failed:
2330 if failed:
2330 return 1
2331 return 1
2331 if warned:
2332 if warned:
2332 return 80
2333 return 80
2333
2334
2334 def _getport(self, count):
2335 def _getport(self, count):
2335 port = self._ports.get(count) # do we have a cached entry?
2336 port = self._ports.get(count) # do we have a cached entry?
2336 if port is None:
2337 if port is None:
2337 portneeded = 3
2338 portneeded = 3
2338 # above 100 tries we just give up and let test reports failure
2339 # above 100 tries we just give up and let test reports failure
2339 for tries in xrange(100):
2340 for tries in xrange(100):
2340 allfree = True
2341 allfree = True
2341 port = self.options.port + self._portoffset
2342 port = self.options.port + self._portoffset
2342 for idx in xrange(portneeded):
2343 for idx in xrange(portneeded):
2343 if not checkportisavailable(port + idx):
2344 if not checkportisavailable(port + idx):
2344 allfree = False
2345 allfree = False
2345 break
2346 break
2346 self._portoffset += portneeded
2347 self._portoffset += portneeded
2347 if allfree:
2348 if allfree:
2348 break
2349 break
2349 self._ports[count] = port
2350 self._ports[count] = port
2350 return port
2351 return port
2351
2352
2352 def _gettest(self, test, count):
2353 def _gettest(self, test, count):
2353 """Obtain a Test by looking at its filename.
2354 """Obtain a Test by looking at its filename.
2354
2355
2355 Returns a Test instance. The Test may not be runnable if it doesn't
2356 Returns a Test instance. The Test may not be runnable if it doesn't
2356 map to a known type.
2357 map to a known type.
2357 """
2358 """
2358 lctest = test.lower()
2359 lctest = test.lower()
2359 testcls = Test
2360 testcls = Test
2360
2361
2361 for ext, cls in self.TESTTYPES:
2362 for ext, cls in self.TESTTYPES:
2362 if lctest.endswith(ext):
2363 if lctest.endswith(ext):
2363 testcls = cls
2364 testcls = cls
2364 break
2365 break
2365
2366
2366 refpath = os.path.join(self._testdir, test)
2367 refpath = os.path.join(self._testdir, test)
2367 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2368 tmpdir = os.path.join(self._hgtmp, b'child%d' % count)
2368
2369
2369 t = testcls(refpath, tmpdir,
2370 t = testcls(refpath, tmpdir,
2370 keeptmpdir=self.options.keep_tmpdir,
2371 keeptmpdir=self.options.keep_tmpdir,
2371 debug=self.options.debug,
2372 debug=self.options.debug,
2372 timeout=self.options.timeout,
2373 timeout=self.options.timeout,
2373 startport=self._getport(count),
2374 startport=self._getport(count),
2374 extraconfigopts=self.options.extra_config_opt,
2375 extraconfigopts=self.options.extra_config_opt,
2375 py3kwarnings=self.options.py3k_warnings,
2376 py3kwarnings=self.options.py3k_warnings,
2376 shell=self.options.shell,
2377 shell=self.options.shell,
2377 hgcommand=self._hgcommand,
2378 hgcommand=self._hgcommand,
2378 usechg=bool(self.options.with_chg or self.options.chg),
2379 usechg=bool(self.options.with_chg or self.options.chg),
2379 useipv6=useipv6)
2380 useipv6=useipv6)
2380 t.should_reload = True
2381 t.should_reload = True
2381 return t
2382 return t
2382
2383
2383 def _cleanup(self):
2384 def _cleanup(self):
2384 """Clean up state from this test invocation."""
2385 """Clean up state from this test invocation."""
2385 if self.options.keep_tmpdir:
2386 if self.options.keep_tmpdir:
2386 return
2387 return
2387
2388
2388 vlog("# Cleaning up HGTMP", self._hgtmp)
2389 vlog("# Cleaning up HGTMP", self._hgtmp)
2389 shutil.rmtree(self._hgtmp, True)
2390 shutil.rmtree(self._hgtmp, True)
2390 for f in self._createdfiles:
2391 for f in self._createdfiles:
2391 try:
2392 try:
2392 os.remove(f)
2393 os.remove(f)
2393 except OSError:
2394 except OSError:
2394 pass
2395 pass
2395
2396
2396 def _usecorrectpython(self):
2397 def _usecorrectpython(self):
2397 """Configure the environment to use the appropriate Python in tests."""
2398 """Configure the environment to use the appropriate Python in tests."""
2398 # Tests must use the same interpreter as us or bad things will happen.
2399 # Tests must use the same interpreter as us or bad things will happen.
2399 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
2400 pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
2400 if getattr(os, 'symlink', None):
2401 if getattr(os, 'symlink', None):
2401 vlog("# Making python executable in test path a symlink to '%s'" %
2402 vlog("# Making python executable in test path a symlink to '%s'" %
2402 sys.executable)
2403 sys.executable)
2403 mypython = os.path.join(self._tmpbindir, pyexename)
2404 mypython = os.path.join(self._tmpbindir, pyexename)
2404 try:
2405 try:
2405 if os.readlink(mypython) == sys.executable:
2406 if os.readlink(mypython) == sys.executable:
2406 return
2407 return
2407 os.unlink(mypython)
2408 os.unlink(mypython)
2408 except OSError as err:
2409 except OSError as err:
2409 if err.errno != errno.ENOENT:
2410 if err.errno != errno.ENOENT:
2410 raise
2411 raise
2411 if self._findprogram(pyexename) != sys.executable:
2412 if self._findprogram(pyexename) != sys.executable:
2412 try:
2413 try:
2413 os.symlink(sys.executable, mypython)
2414 os.symlink(sys.executable, mypython)
2414 self._createdfiles.append(mypython)
2415 self._createdfiles.append(mypython)
2415 except OSError as err:
2416 except OSError as err:
2416 # child processes may race, which is harmless
2417 # child processes may race, which is harmless
2417 if err.errno != errno.EEXIST:
2418 if err.errno != errno.EEXIST:
2418 raise
2419 raise
2419 else:
2420 else:
2420 exedir, exename = os.path.split(sys.executable)
2421 exedir, exename = os.path.split(sys.executable)
2421 vlog("# Modifying search path to find %s as %s in '%s'" %
2422 vlog("# Modifying search path to find %s as %s in '%s'" %
2422 (exename, pyexename, exedir))
2423 (exename, pyexename, exedir))
2423 path = os.environ['PATH'].split(os.pathsep)
2424 path = os.environ['PATH'].split(os.pathsep)
2424 while exedir in path:
2425 while exedir in path:
2425 path.remove(exedir)
2426 path.remove(exedir)
2426 os.environ['PATH'] = os.pathsep.join([exedir] + path)
2427 os.environ['PATH'] = os.pathsep.join([exedir] + path)
2427 if not self._findprogram(pyexename):
2428 if not self._findprogram(pyexename):
2428 print("WARNING: Cannot find %s in search path" % pyexename)
2429 print("WARNING: Cannot find %s in search path" % pyexename)
2429
2430
2430 def _installhg(self):
2431 def _installhg(self):
2431 """Install hg into the test environment.
2432 """Install hg into the test environment.
2432
2433
2433 This will also configure hg with the appropriate testing settings.
2434 This will also configure hg with the appropriate testing settings.
2434 """
2435 """
2435 vlog("# Performing temporary installation of HG")
2436 vlog("# Performing temporary installation of HG")
2436 installerrs = os.path.join(self._hgtmp, b"install.err")
2437 installerrs = os.path.join(self._hgtmp, b"install.err")
2437 compiler = ''
2438 compiler = ''
2438 if self.options.compiler:
2439 if self.options.compiler:
2439 compiler = '--compiler ' + self.options.compiler
2440 compiler = '--compiler ' + self.options.compiler
2440 if self.options.pure:
2441 if self.options.pure:
2441 pure = b"--pure"
2442 pure = b"--pure"
2442 else:
2443 else:
2443 pure = b""
2444 pure = b""
2444
2445
2445 # Run installer in hg root
2446 # Run installer in hg root
2446 script = os.path.realpath(sys.argv[0])
2447 script = os.path.realpath(sys.argv[0])
2447 exe = sys.executable
2448 exe = sys.executable
2448 if PYTHON3:
2449 if PYTHON3:
2449 compiler = _bytespath(compiler)
2450 compiler = _bytespath(compiler)
2450 script = _bytespath(script)
2451 script = _bytespath(script)
2451 exe = _bytespath(exe)
2452 exe = _bytespath(exe)
2452 hgroot = os.path.dirname(os.path.dirname(script))
2453 hgroot = os.path.dirname(os.path.dirname(script))
2453 self._hgroot = hgroot
2454 self._hgroot = hgroot
2454 os.chdir(hgroot)
2455 os.chdir(hgroot)
2455 nohome = b'--home=""'
2456 nohome = b'--home=""'
2456 if os.name == 'nt':
2457 if os.name == 'nt':
2457 # The --home="" trick works only on OS where os.sep == '/'
2458 # The --home="" trick works only on OS where os.sep == '/'
2458 # because of a distutils convert_path() fast-path. Avoid it at
2459 # because of a distutils convert_path() fast-path. Avoid it at
2459 # least on Windows for now, deal with .pydistutils.cfg bugs
2460 # least on Windows for now, deal with .pydistutils.cfg bugs
2460 # when they happen.
2461 # when they happen.
2461 nohome = b''
2462 nohome = b''
2462 cmd = (b'%(exe)s setup.py %(pure)s clean --all'
2463 cmd = (b'%(exe)s setup.py %(pure)s clean --all'
2463 b' build %(compiler)s --build-base="%(base)s"'
2464 b' build %(compiler)s --build-base="%(base)s"'
2464 b' install --force --prefix="%(prefix)s"'
2465 b' install --force --prefix="%(prefix)s"'
2465 b' --install-lib="%(libdir)s"'
2466 b' --install-lib="%(libdir)s"'
2466 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
2467 b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
2467 % {b'exe': exe, b'pure': pure,
2468 % {b'exe': exe, b'pure': pure,
2468 b'compiler': compiler,
2469 b'compiler': compiler,
2469 b'base': os.path.join(self._hgtmp, b"build"),
2470 b'base': os.path.join(self._hgtmp, b"build"),
2470 b'prefix': self._installdir, b'libdir': self._pythondir,
2471 b'prefix': self._installdir, b'libdir': self._pythondir,
2471 b'bindir': self._bindir,
2472 b'bindir': self._bindir,
2472 b'nohome': nohome, b'logfile': installerrs})
2473 b'nohome': nohome, b'logfile': installerrs})
2473
2474
2474 # setuptools requires install directories to exist.
2475 # setuptools requires install directories to exist.
2475 def makedirs(p):
2476 def makedirs(p):
2476 try:
2477 try:
2477 os.makedirs(p)
2478 os.makedirs(p)
2478 except OSError as e:
2479 except OSError as e:
2479 if e.errno != errno.EEXIST:
2480 if e.errno != errno.EEXIST:
2480 raise
2481 raise
2481 makedirs(self._pythondir)
2482 makedirs(self._pythondir)
2482 makedirs(self._bindir)
2483 makedirs(self._bindir)
2483
2484
2484 vlog("# Running", cmd)
2485 vlog("# Running", cmd)
2485 if os.system(cmd) == 0:
2486 if os.system(cmd) == 0:
2486 if not self.options.verbose:
2487 if not self.options.verbose:
2487 try:
2488 try:
2488 os.remove(installerrs)
2489 os.remove(installerrs)
2489 except OSError as e:
2490 except OSError as e:
2490 if e.errno != errno.ENOENT:
2491 if e.errno != errno.ENOENT:
2491 raise
2492 raise
2492 else:
2493 else:
2493 f = open(installerrs, 'rb')
2494 f = open(installerrs, 'rb')
2494 for line in f:
2495 for line in f:
2495 if PYTHON3:
2496 if PYTHON3:
2496 sys.stdout.buffer.write(line)
2497 sys.stdout.buffer.write(line)
2497 else:
2498 else:
2498 sys.stdout.write(line)
2499 sys.stdout.write(line)
2499 f.close()
2500 f.close()
2500 sys.exit(1)
2501 sys.exit(1)
2501 os.chdir(self._testdir)
2502 os.chdir(self._testdir)
2502
2503
2503 self._usecorrectpython()
2504 self._usecorrectpython()
2504
2505
2505 if self.options.py3k_warnings and not self.options.anycoverage:
2506 if self.options.py3k_warnings and not self.options.anycoverage:
2506 vlog("# Updating hg command to enable Py3k Warnings switch")
2507 vlog("# Updating hg command to enable Py3k Warnings switch")
2507 f = open(os.path.join(self._bindir, 'hg'), 'rb')
2508 f = open(os.path.join(self._bindir, 'hg'), 'rb')
2508 lines = [line.rstrip() for line in f]
2509 lines = [line.rstrip() for line in f]
2509 lines[0] += ' -3'
2510 lines[0] += ' -3'
2510 f.close()
2511 f.close()
2511 f = open(os.path.join(self._bindir, 'hg'), 'wb')
2512 f = open(os.path.join(self._bindir, 'hg'), 'wb')
2512 for line in lines:
2513 for line in lines:
2513 f.write(line + '\n')
2514 f.write(line + '\n')
2514 f.close()
2515 f.close()
2515
2516
2516 hgbat = os.path.join(self._bindir, b'hg.bat')
2517 hgbat = os.path.join(self._bindir, b'hg.bat')
2517 if os.path.isfile(hgbat):
2518 if os.path.isfile(hgbat):
2518 # hg.bat expects to be put in bin/scripts while run-tests.py
2519 # hg.bat expects to be put in bin/scripts while run-tests.py
2519 # installation layout put it in bin/ directly. Fix it
2520 # installation layout put it in bin/ directly. Fix it
2520 f = open(hgbat, 'rb')
2521 f = open(hgbat, 'rb')
2521 data = f.read()
2522 data = f.read()
2522 f.close()
2523 f.close()
2523 if b'"%~dp0..\python" "%~dp0hg" %*' in data:
2524 if b'"%~dp0..\python" "%~dp0hg" %*' in data:
2524 data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
2525 data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
2525 b'"%~dp0python" "%~dp0hg" %*')
2526 b'"%~dp0python" "%~dp0hg" %*')
2526 f = open(hgbat, 'wb')
2527 f = open(hgbat, 'wb')
2527 f.write(data)
2528 f.write(data)
2528 f.close()
2529 f.close()
2529 else:
2530 else:
2530 print('WARNING: cannot fix hg.bat reference to python.exe')
2531 print('WARNING: cannot fix hg.bat reference to python.exe')
2531
2532
2532 if self.options.anycoverage:
2533 if self.options.anycoverage:
2533 custom = os.path.join(self._testdir, 'sitecustomize.py')
2534 custom = os.path.join(self._testdir, 'sitecustomize.py')
2534 target = os.path.join(self._pythondir, 'sitecustomize.py')
2535 target = os.path.join(self._pythondir, 'sitecustomize.py')
2535 vlog('# Installing coverage trigger to %s' % target)
2536 vlog('# Installing coverage trigger to %s' % target)
2536 shutil.copyfile(custom, target)
2537 shutil.copyfile(custom, target)
2537 rc = os.path.join(self._testdir, '.coveragerc')
2538 rc = os.path.join(self._testdir, '.coveragerc')
2538 vlog('# Installing coverage rc to %s' % rc)
2539 vlog('# Installing coverage rc to %s' % rc)
2539 os.environ['COVERAGE_PROCESS_START'] = rc
2540 os.environ['COVERAGE_PROCESS_START'] = rc
2540 covdir = os.path.join(self._installdir, '..', 'coverage')
2541 covdir = os.path.join(self._installdir, '..', 'coverage')
2541 try:
2542 try:
2542 os.mkdir(covdir)
2543 os.mkdir(covdir)
2543 except OSError as e:
2544 except OSError as e:
2544 if e.errno != errno.EEXIST:
2545 if e.errno != errno.EEXIST:
2545 raise
2546 raise
2546
2547
2547 os.environ['COVERAGE_DIR'] = covdir
2548 os.environ['COVERAGE_DIR'] = covdir
2548
2549
2549 def _checkhglib(self, verb):
2550 def _checkhglib(self, verb):
2550 """Ensure that the 'mercurial' package imported by python is
2551 """Ensure that the 'mercurial' package imported by python is
2551 the one we expect it to be. If not, print a warning to stderr."""
2552 the one we expect it to be. If not, print a warning to stderr."""
2552 if ((self._bindir == self._pythondir) and
2553 if ((self._bindir == self._pythondir) and
2553 (self._bindir != self._tmpbindir)):
2554 (self._bindir != self._tmpbindir)):
2554 # The pythondir has been inferred from --with-hg flag.
2555 # The pythondir has been inferred from --with-hg flag.
2555 # We cannot expect anything sensible here.
2556 # We cannot expect anything sensible here.
2556 return
2557 return
2557 expecthg = os.path.join(self._pythondir, b'mercurial')
2558 expecthg = os.path.join(self._pythondir, b'mercurial')
2558 actualhg = self._gethgpath()
2559 actualhg = self._gethgpath()
2559 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
2560 if os.path.abspath(actualhg) != os.path.abspath(expecthg):
2560 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
2561 sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
2561 ' (expected %s)\n'
2562 ' (expected %s)\n'
2562 % (verb, actualhg, expecthg))
2563 % (verb, actualhg, expecthg))
2563 def _gethgpath(self):
2564 def _gethgpath(self):
2564 """Return the path to the mercurial package that is actually found by
2565 """Return the path to the mercurial package that is actually found by
2565 the current Python interpreter."""
2566 the current Python interpreter."""
2566 if self._hgpath is not None:
2567 if self._hgpath is not None:
2567 return self._hgpath
2568 return self._hgpath
2568
2569
2569 cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
2570 cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
2570 cmd = cmd % PYTHON
2571 cmd = cmd % PYTHON
2571 if PYTHON3:
2572 if PYTHON3:
2572 cmd = _strpath(cmd)
2573 cmd = _strpath(cmd)
2573 pipe = os.popen(cmd)
2574 pipe = os.popen(cmd)
2574 try:
2575 try:
2575 self._hgpath = _bytespath(pipe.read().strip())
2576 self._hgpath = _bytespath(pipe.read().strip())
2576 finally:
2577 finally:
2577 pipe.close()
2578 pipe.close()
2578
2579
2579 return self._hgpath
2580 return self._hgpath
2580
2581
2581 def _installchg(self):
2582 def _installchg(self):
2582 """Install chg into the test environment"""
2583 """Install chg into the test environment"""
2583 vlog('# Performing temporary installation of CHG')
2584 vlog('# Performing temporary installation of CHG')
2584 assert os.path.dirname(self._bindir) == self._installdir
2585 assert os.path.dirname(self._bindir) == self._installdir
2585 assert self._hgroot, 'must be called after _installhg()'
2586 assert self._hgroot, 'must be called after _installhg()'
2586 cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
2587 cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
2587 % {b'make': 'make', # TODO: switch by option or environment?
2588 % {b'make': 'make', # TODO: switch by option or environment?
2588 b'prefix': self._installdir})
2589 b'prefix': self._installdir})
2589 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
2590 cwd = os.path.join(self._hgroot, b'contrib', b'chg')
2590 vlog("# Running", cmd)
2591 vlog("# Running", cmd)
2591 proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
2592 proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
2592 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
2593 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
2593 stderr=subprocess.STDOUT)
2594 stderr=subprocess.STDOUT)
2594 out, _err = proc.communicate()
2595 out, _err = proc.communicate()
2595 if proc.returncode != 0:
2596 if proc.returncode != 0:
2596 if PYTHON3:
2597 if PYTHON3:
2597 sys.stdout.buffer.write(out)
2598 sys.stdout.buffer.write(out)
2598 else:
2599 else:
2599 sys.stdout.write(out)
2600 sys.stdout.write(out)
2600 sys.exit(1)
2601 sys.exit(1)
2601
2602
2602 def _outputcoverage(self):
2603 def _outputcoverage(self):
2603 """Produce code coverage output."""
2604 """Produce code coverage output."""
2604 import coverage
2605 import coverage
2605 coverage = coverage.coverage
2606 coverage = coverage.coverage
2606
2607
2607 vlog('# Producing coverage report')
2608 vlog('# Producing coverage report')
2608 # chdir is the easiest way to get short, relative paths in the
2609 # chdir is the easiest way to get short, relative paths in the
2609 # output.
2610 # output.
2610 os.chdir(self._hgroot)
2611 os.chdir(self._hgroot)
2611 covdir = os.path.join(self._installdir, '..', 'coverage')
2612 covdir = os.path.join(self._installdir, '..', 'coverage')
2612 cov = coverage(data_file=os.path.join(covdir, 'cov'))
2613 cov = coverage(data_file=os.path.join(covdir, 'cov'))
2613
2614
2614 # Map install directory paths back to source directory.
2615 # Map install directory paths back to source directory.
2615 cov.config.paths['srcdir'] = ['.', self._pythondir]
2616 cov.config.paths['srcdir'] = ['.', self._pythondir]
2616
2617
2617 cov.combine()
2618 cov.combine()
2618
2619
2619 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
2620 omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
2620 cov.report(ignore_errors=True, omit=omit)
2621 cov.report(ignore_errors=True, omit=omit)
2621
2622
2622 if self.options.htmlcov:
2623 if self.options.htmlcov:
2623 htmldir = os.path.join(self._testdir, 'htmlcov')
2624 htmldir = os.path.join(self._testdir, 'htmlcov')
2624 cov.html_report(directory=htmldir, omit=omit)
2625 cov.html_report(directory=htmldir, omit=omit)
2625 if self.options.annotate:
2626 if self.options.annotate:
2626 adir = os.path.join(self._testdir, 'annotated')
2627 adir = os.path.join(self._testdir, 'annotated')
2627 if not os.path.isdir(adir):
2628 if not os.path.isdir(adir):
2628 os.mkdir(adir)
2629 os.mkdir(adir)
2629 cov.annotate(directory=adir, omit=omit)
2630 cov.annotate(directory=adir, omit=omit)
2630
2631
2631 def _findprogram(self, program):
2632 def _findprogram(self, program):
2632 """Search PATH for a executable program"""
2633 """Search PATH for a executable program"""
2633 dpb = _bytespath(os.defpath)
2634 dpb = _bytespath(os.defpath)
2634 sepb = _bytespath(os.pathsep)
2635 sepb = _bytespath(os.pathsep)
2635 for p in osenvironb.get(b'PATH', dpb).split(sepb):
2636 for p in osenvironb.get(b'PATH', dpb).split(sepb):
2636 name = os.path.join(p, program)
2637 name = os.path.join(p, program)
2637 if os.name == 'nt' or os.access(name, os.X_OK):
2638 if os.name == 'nt' or os.access(name, os.X_OK):
2638 return name
2639 return name
2639 return None
2640 return None
2640
2641
2641 def _checktools(self):
2642 def _checktools(self):
2642 """Ensure tools required to run tests are present."""
2643 """Ensure tools required to run tests are present."""
2643 for p in self.REQUIREDTOOLS:
2644 for p in self.REQUIREDTOOLS:
2644 if os.name == 'nt' and not p.endswith('.exe'):
2645 if os.name == 'nt' and not p.endswith('.exe'):
2645 p += '.exe'
2646 p += '.exe'
2646 found = self._findprogram(p)
2647 found = self._findprogram(p)
2647 if found:
2648 if found:
2648 vlog("# Found prerequisite", p, "at", found)
2649 vlog("# Found prerequisite", p, "at", found)
2649 else:
2650 else:
2650 print("WARNING: Did not find prerequisite tool: %s " %
2651 print("WARNING: Did not find prerequisite tool: %s " %
2651 p.decode("utf-8"))
2652 p.decode("utf-8"))
2652
2653
2653 if __name__ == '__main__':
2654 if __name__ == '__main__':
2654 runner = TestRunner()
2655 runner = TestRunner()
2655
2656
2656 try:
2657 try:
2657 import msvcrt
2658 import msvcrt
2658 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
2659 msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
2659 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
2660 msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
2660 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
2661 msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
2661 except ImportError:
2662 except ImportError:
2662 pass
2663 pass
2663
2664
2664 sys.exit(runner.run(sys.argv[1:]))
2665 sys.exit(runner.run(sys.argv[1:]))
@@ -1,166 +1,180 b''
1
1
2 $ cat << EOF > buggylocking.py
2 $ cat << EOF > buggylocking.py
3 > """A small extension that tests our developer warnings
3 > """A small extension that tests our developer warnings
4 > """
4 > """
5 >
5 >
6 > from mercurial import cmdutil, repair
6 > from mercurial import cmdutil, repair, util
7 >
7 >
8 > cmdtable = {}
8 > cmdtable = {}
9 > command = cmdutil.command(cmdtable)
9 > command = cmdutil.command(cmdtable)
10 >
10 >
11 > @command('buggylocking', [], '')
11 > @command('buggylocking', [], '')
12 > def buggylocking(ui, repo):
12 > def buggylocking(ui, repo):
13 > lo = repo.lock()
13 > lo = repo.lock()
14 > wl = repo.wlock()
14 > wl = repo.wlock()
15 > wl.release()
15 > wl.release()
16 > lo.release()
16 > lo.release()
17 >
17 >
18 > @command('buggytransaction', [], '')
18 > @command('buggytransaction', [], '')
19 > def buggylocking(ui, repo):
19 > def buggylocking(ui, repo):
20 > tr = repo.transaction('buggy')
20 > tr = repo.transaction('buggy')
21 > # make sure we rollback the transaction as we don't want to rely on the__del__
21 > # make sure we rollback the transaction as we don't want to rely on the__del__
22 > tr.release()
22 > tr.release()
23 >
23 >
24 > @command('properlocking', [], '')
24 > @command('properlocking', [], '')
25 > def properlocking(ui, repo):
25 > def properlocking(ui, repo):
26 > """check that reentrance is fine"""
26 > """check that reentrance is fine"""
27 > wl = repo.wlock()
27 > wl = repo.wlock()
28 > lo = repo.lock()
28 > lo = repo.lock()
29 > tr = repo.transaction('proper')
29 > tr = repo.transaction('proper')
30 > tr2 = repo.transaction('proper')
30 > tr2 = repo.transaction('proper')
31 > lo2 = repo.lock()
31 > lo2 = repo.lock()
32 > wl2 = repo.wlock()
32 > wl2 = repo.wlock()
33 > wl2.release()
33 > wl2.release()
34 > lo2.release()
34 > lo2.release()
35 > tr2.close()
35 > tr2.close()
36 > tr.close()
36 > tr.close()
37 > lo.release()
37 > lo.release()
38 > wl.release()
38 > wl.release()
39 >
39 >
40 > @command('nowaitlocking', [], '')
40 > @command('nowaitlocking', [], '')
41 > def nowaitlocking(ui, repo):
41 > def nowaitlocking(ui, repo):
42 > lo = repo.lock()
42 > lo = repo.lock()
43 > wl = repo.wlock(wait=False)
43 > wl = repo.wlock(wait=False)
44 > wl.release()
44 > wl.release()
45 > lo.release()
45 > lo.release()
46 >
46 >
47 > @command('stripintr', [], '')
47 > @command('stripintr', [], '')
48 > def stripintr(ui, repo):
48 > def stripintr(ui, repo):
49 > lo = repo.lock()
49 > lo = repo.lock()
50 > tr = repo.transaction('foobar')
50 > tr = repo.transaction('foobar')
51 > try:
51 > try:
52 > repair.strip(repo.ui, repo, [repo['.'].node()])
52 > repair.strip(repo.ui, repo, [repo['.'].node()])
53 > finally:
53 > finally:
54 > lo.release()
54 > lo.release()
55 > @command('oldanddeprecated', [], '')
55 > @command('oldanddeprecated', [], '')
56 > def oldanddeprecated(ui, repo):
56 > def oldanddeprecated(ui, repo):
57 > """test deprecation warning API"""
57 > """test deprecation warning API"""
58 > def foobar(ui):
58 > def foobar(ui):
59 > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
59 > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
60 > foobar(ui)
60 > foobar(ui)
61 > @command('nouiwarning', [], '')
62 > def nouiwarning(ui, repo):
63 > util.nouideprecwarn('this is a test', '13.37')
61 > EOF
64 > EOF
62
65
63 $ cat << EOF >> $HGRCPATH
66 $ cat << EOF >> $HGRCPATH
64 > [extensions]
67 > [extensions]
65 > buggylocking=$TESTTMP/buggylocking.py
68 > buggylocking=$TESTTMP/buggylocking.py
66 > mock=$TESTDIR/mockblackbox.py
69 > mock=$TESTDIR/mockblackbox.py
67 > blackbox=
70 > blackbox=
68 > [devel]
71 > [devel]
69 > all-warnings=1
72 > all-warnings=1
70 > EOF
73 > EOF
71
74
72 $ hg init lock-checker
75 $ hg init lock-checker
73 $ cd lock-checker
76 $ cd lock-checker
74 $ hg buggylocking
77 $ hg buggylocking
75 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
78 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
76 $ cat << EOF >> $HGRCPATH
79 $ cat << EOF >> $HGRCPATH
77 > [devel]
80 > [devel]
78 > all=0
81 > all=0
79 > check-locks=1
82 > check-locks=1
80 > EOF
83 > EOF
81 $ hg buggylocking
84 $ hg buggylocking
82 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
85 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
83 $ hg buggylocking --traceback
86 $ hg buggylocking --traceback
84 devel-warn: "wlock" acquired after "lock" at:
87 devel-warn: "wlock" acquired after "lock" at:
85 */hg:* in * (glob)
88 */hg:* in * (glob)
86 */mercurial/dispatch.py:* in run (glob)
89 */mercurial/dispatch.py:* in run (glob)
87 */mercurial/dispatch.py:* in dispatch (glob)
90 */mercurial/dispatch.py:* in dispatch (glob)
88 */mercurial/dispatch.py:* in _runcatch (glob)
91 */mercurial/dispatch.py:* in _runcatch (glob)
89 */mercurial/dispatch.py:* in callcatch (glob)
92 */mercurial/dispatch.py:* in callcatch (glob)
90 */mercurial/scmutil.py* in callcatch (glob)
93 */mercurial/scmutil.py* in callcatch (glob)
91 */mercurial/dispatch.py:* in _runcatchfunc (glob)
94 */mercurial/dispatch.py:* in _runcatchfunc (glob)
92 */mercurial/dispatch.py:* in _dispatch (glob)
95 */mercurial/dispatch.py:* in _dispatch (glob)
93 */mercurial/dispatch.py:* in runcommand (glob)
96 */mercurial/dispatch.py:* in runcommand (glob)
94 */mercurial/dispatch.py:* in _runcommand (glob)
97 */mercurial/dispatch.py:* in _runcommand (glob)
95 */mercurial/dispatch.py:* in <lambda> (glob)
98 */mercurial/dispatch.py:* in <lambda> (glob)
96 */mercurial/util.py:* in check (glob)
99 */mercurial/util.py:* in check (glob)
97 $TESTTMP/buggylocking.py:* in buggylocking (glob)
100 $TESTTMP/buggylocking.py:* in buggylocking (glob)
98 $ hg properlocking
101 $ hg properlocking
99 $ hg nowaitlocking
102 $ hg nowaitlocking
100
103
101 $ echo a > a
104 $ echo a > a
102 $ hg add a
105 $ hg add a
103 $ hg commit -m a
106 $ hg commit -m a
104 $ hg stripintr 2>&1 | egrep -v '^(\*\*| )'
107 $ hg stripintr 2>&1 | egrep -v '^(\*\*| )'
105 saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
108 saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
106 Traceback (most recent call last):
109 Traceback (most recent call last):
107 mercurial.error.ProgrammingError: cannot strip from inside a transaction
110 mercurial.error.ProgrammingError: cannot strip from inside a transaction
108
111
109 $ hg oldanddeprecated
112 $ hg oldanddeprecated
110 devel-warn: foorbar is deprecated, go shopping
113 devel-warn: foorbar is deprecated, go shopping
111 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
114 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
112
115
113 $ hg oldanddeprecated --traceback
116 $ hg oldanddeprecated --traceback
114 devel-warn: foorbar is deprecated, go shopping
117 devel-warn: foorbar is deprecated, go shopping
115 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
118 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
116 */hg:* in <module> (glob)
119 */hg:* in <module> (glob)
117 */mercurial/dispatch.py:* in run (glob)
120 */mercurial/dispatch.py:* in run (glob)
118 */mercurial/dispatch.py:* in dispatch (glob)
121 */mercurial/dispatch.py:* in dispatch (glob)
119 */mercurial/dispatch.py:* in _runcatch (glob)
122 */mercurial/dispatch.py:* in _runcatch (glob)
120 */mercurial/dispatch.py:* in callcatch (glob)
123 */mercurial/dispatch.py:* in callcatch (glob)
121 */mercurial/scmutil.py* in callcatch (glob)
124 */mercurial/scmutil.py* in callcatch (glob)
122 */mercurial/dispatch.py:* in _runcatchfunc (glob)
125 */mercurial/dispatch.py:* in _runcatchfunc (glob)
123 */mercurial/dispatch.py:* in _dispatch (glob)
126 */mercurial/dispatch.py:* in _dispatch (glob)
124 */mercurial/dispatch.py:* in runcommand (glob)
127 */mercurial/dispatch.py:* in runcommand (glob)
125 */mercurial/dispatch.py:* in _runcommand (glob)
128 */mercurial/dispatch.py:* in _runcommand (glob)
126 */mercurial/dispatch.py:* in <lambda> (glob)
129 */mercurial/dispatch.py:* in <lambda> (glob)
127 */mercurial/util.py:* in check (glob)
130 */mercurial/util.py:* in check (glob)
128 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
131 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
129 $ hg blackbox -l 7
132 $ hg blackbox -l 7
130 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
133 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
131 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
134 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
132 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
135 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
133 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob)
136 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob)
134 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
137 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
135 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
138 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
136 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
139 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
137 */hg:* in <module> (glob)
140 */hg:* in <module> (glob)
138 */mercurial/dispatch.py:* in run (glob)
141 */mercurial/dispatch.py:* in run (glob)
139 */mercurial/dispatch.py:* in dispatch (glob)
142 */mercurial/dispatch.py:* in dispatch (glob)
140 */mercurial/dispatch.py:* in _runcatch (glob)
143 */mercurial/dispatch.py:* in _runcatch (glob)
141 */mercurial/dispatch.py:* in callcatch (glob)
144 */mercurial/dispatch.py:* in callcatch (glob)
142 */mercurial/scmutil.py* in callcatch (glob)
145 */mercurial/scmutil.py* in callcatch (glob)
143 */mercurial/dispatch.py:* in _runcatchfunc (glob)
146 */mercurial/dispatch.py:* in _runcatchfunc (glob)
144 */mercurial/dispatch.py:* in _dispatch (glob)
147 */mercurial/dispatch.py:* in _dispatch (glob)
145 */mercurial/dispatch.py:* in runcommand (glob)
148 */mercurial/dispatch.py:* in runcommand (glob)
146 */mercurial/dispatch.py:* in _runcommand (glob)
149 */mercurial/dispatch.py:* in _runcommand (glob)
147 */mercurial/dispatch.py:* in <lambda> (glob)
150 */mercurial/dispatch.py:* in <lambda> (glob)
148 */mercurial/util.py:* in check (glob)
151 */mercurial/util.py:* in check (glob)
149 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
152 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
150 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
153 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
151 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 7
154 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 7
152
155
153 Test programming error failure:
156 Test programming error failure:
154
157
155 $ hg buggytransaction 2>&1 | egrep -v '^ '
158 $ hg buggytransaction 2>&1 | egrep -v '^ '
156 ** Unknown exception encountered with possibly-broken third-party extension buggylocking
159 ** Unknown exception encountered with possibly-broken third-party extension buggylocking
157 ** which supports versions unknown of Mercurial.
160 ** which supports versions unknown of Mercurial.
158 ** Please disable buggylocking and try your action again.
161 ** Please disable buggylocking and try your action again.
159 ** If that fixes the bug please report it to the extension author.
162 ** If that fixes the bug please report it to the extension author.
160 ** Python * (glob)
163 ** Python * (glob)
161 ** Mercurial Distributed SCM (*) (glob)
164 ** Mercurial Distributed SCM (*) (glob)
162 ** Extensions loaded: * (glob)
165 ** Extensions loaded: * (glob)
163 Traceback (most recent call last):
166 Traceback (most recent call last):
164 mercurial.error.ProgrammingError: transaction requires locking
167 mercurial.error.ProgrammingError: transaction requires locking
165
168
169 Old style deprecation warning
170
171 $ hg nouiwarning
172 $TESTTMP/buggylocking.py:61: DeprecationWarning: this is a test
173 (compatibility will be dropped after Mercurial-13.37, update your code.)
174 util.nouideprecwarn('this is a test', '13.37')
175
176 (disabled outside of test run)
177
178 $ HGEMITWARNINGS= hg nouiwarning
179
166 $ cd ..
180 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now