##// END OF EJS Templates
util: add a way to issue deprecation warning without a UI object...
Pierre-Yves David -
r31950:cc70c6db default
parent child Browse files
Show More
@@ -1,3720 +1,3746 b''
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 from __future__ import absolute_import
17 17
18 18 import bz2
19 19 import calendar
20 20 import codecs
21 21 import collections
22 22 import datetime
23 23 import errno
24 24 import gc
25 25 import hashlib
26 26 import imp
27 27 import os
28 28 import platform as pyplatform
29 29 import re as remod
30 30 import shutil
31 31 import signal
32 32 import socket
33 33 import stat
34 34 import string
35 35 import subprocess
36 36 import sys
37 37 import tempfile
38 38 import textwrap
39 39 import time
40 40 import traceback
41 import warnings
41 42 import zlib
42 43
43 44 from . import (
44 45 encoding,
45 46 error,
46 47 i18n,
47 48 osutil,
48 49 parsers,
49 50 pycompat,
50 51 )
51 52
52 53 cookielib = pycompat.cookielib
53 54 empty = pycompat.empty
54 55 httplib = pycompat.httplib
55 56 httpserver = pycompat.httpserver
56 57 pickle = pycompat.pickle
57 58 queue = pycompat.queue
58 59 socketserver = pycompat.socketserver
59 60 stderr = pycompat.stderr
60 61 stdin = pycompat.stdin
61 62 stdout = pycompat.stdout
62 63 stringio = pycompat.stringio
63 64 urlerr = pycompat.urlerr
64 65 urlreq = pycompat.urlreq
65 66 xmlrpclib = pycompat.xmlrpclib
66 67
def isatty(fp):
    """Report whether file object *fp* is hooked up to a terminal.

    Objects without a working ``isatty()`` method are treated as
    non-interactive.
    """
    try:
        result = fp.isatty()
    except AttributeError:
        result = False
    return result
72 73
73 74 # glibc determines buffering on first write to stdout - if we replace a TTY
74 75 # destined stdout with a pipe destined stdout (e.g. pager), we want line
75 76 # buffering
76 77 if isatty(stdout):
77 78 stdout = os.fdopen(stdout.fileno(), pycompat.sysstr('wb'), 1)
78 79
79 80 if pycompat.osname == 'nt':
80 81 from . import windows as platform
81 82 stdout = platform.winstdout(stdout)
82 83 else:
83 84 from . import posix as platform
84 85
85 86 _ = i18n._
86 87
87 88 bindunixsocket = platform.bindunixsocket
88 89 cachestat = platform.cachestat
89 90 checkexec = platform.checkexec
90 91 checklink = platform.checklink
91 92 copymode = platform.copymode
92 93 executablepath = platform.executablepath
93 94 expandglobs = platform.expandglobs
94 95 explainexit = platform.explainexit
95 96 findexe = platform.findexe
96 97 gethgcmd = platform.gethgcmd
97 98 getuser = platform.getuser
98 99 getpid = os.getpid
99 100 groupmembers = platform.groupmembers
100 101 groupname = platform.groupname
101 102 hidewindow = platform.hidewindow
102 103 isexec = platform.isexec
103 104 isowner = platform.isowner
104 105 localpath = platform.localpath
105 106 lookupreg = platform.lookupreg
106 107 makedir = platform.makedir
107 108 nlinks = platform.nlinks
108 109 normpath = platform.normpath
109 110 normcase = platform.normcase
110 111 normcasespec = platform.normcasespec
111 112 normcasefallback = platform.normcasefallback
112 113 openhardlinks = platform.openhardlinks
113 114 oslink = platform.oslink
114 115 parsepatchoutput = platform.parsepatchoutput
115 116 pconvert = platform.pconvert
116 117 poll = platform.poll
117 118 popen = platform.popen
118 119 posixfile = platform.posixfile
119 120 quotecommand = platform.quotecommand
120 121 readpipe = platform.readpipe
121 122 rename = platform.rename
122 123 removedirs = platform.removedirs
123 124 samedevice = platform.samedevice
124 125 samefile = platform.samefile
125 126 samestat = platform.samestat
126 127 setbinary = platform.setbinary
127 128 setflags = platform.setflags
128 129 setsignalhandler = platform.setsignalhandler
129 130 shellquote = platform.shellquote
130 131 spawndetached = platform.spawndetached
131 132 split = platform.split
132 133 sshargs = platform.sshargs
133 134 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
134 135 statisexec = platform.statisexec
135 136 statislink = platform.statislink
136 137 testpid = platform.testpid
137 138 umask = platform.umask
138 139 unlink = platform.unlink
139 140 username = platform.username
140 141
141 142 # Python compatibility
142 143
143 144 _notset = object()
144 145
145 146 # disable Python's problematic floating point timestamps (issue4836)
146 147 # (Python hypocritically says you shouldn't change this behavior in
147 148 # libraries, and sure enough Mercurial is not a library.)
148 149 os.stat_float_times(False)
149 150
def safehasattr(thing, attr):
    """hasattr() replacement that is safe with properties.

    Plain hasattr() (on Python 2) swallows any exception raised while
    computing the attribute; this variant only treats actual absence
    (signalled by the _notset sentinel default) as "no attribute".
    """
    missing = _notset
    return getattr(thing, attr, missing) is not missing
152 153
def bitsfrom(container):
    """OR together every entry of *container* into a single bitmask."""
    mask = 0
    for flag in container:
        mask |= flag
    return mask
158 159
# python 2.6 still have deprecation warning enabled by default. We do not want
# to display anything to standard user so detect if we are running test and
# only use python deprecation warning in this case.
# (the test runner opts in by exporting HGEMITWARNINGS)
_dowarn = bool(encoding.environ.get('HGEMITWARNINGS'))
if _dowarn:
    # explicitly unfilter our warning for python 2.7
    #
    # The option of setting PYTHONWARNINGS in the test runner was investigated.
    # However, module name set through PYTHONWARNINGS was exactly matched, so
    # we cannot set 'mercurial' and have it match eg: 'mercurial.scmutil'. This
    # makes the whole PYTHONWARNINGS thing useless for our usecase.
    warnings.filterwarnings('default', '', DeprecationWarning, 'mercurial')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext')
    warnings.filterwarnings('default', '', DeprecationWarning, 'hgext3rd')
174
def nouideprecwarn(msg, version, stacklevel=1):
    """Issue a Python-native deprecation warning.

    This is a noop outside of tests, use 'ui.deprecwarn' when possible.
    """
    if not _dowarn:
        return
    suffix = ("\n(compatibility will be dropped after Mercurial-%s,"
              " update your code.)") % version
    # bump stacklevel so the warning points at our caller's caller
    warnings.warn(msg + suffix, DeprecationWarning, stacklevel + 1)
184
# digest algorithms available for integrity checking, keyed by name
DIGESTS = {
    'md5': hashlib.md5,
    'sha1': hashlib.sha1,
    'sha512': hashlib.sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS

class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """Feed *data* to every underlying hash object."""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            # Fixed: previously this interpolated the module-level loop
            # leftover 'k' instead of 'key', reporting the wrong digest
            # name in the error message.
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
216 242
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        """Read from the wrapped handle, updating digests and byte count."""
        chunk = self._fh.read(length)
        self._digester.update(chunk)
        self._got += len(chunk)
        return chunk

    def validate(self):
        """Raise Abort unless both size and every digest match."""
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
248 274
# 'buffer' compatibility shim: the builtin exists on Python 2 but was
# removed in Python 3, where memoryview provides the zero-copy equivalent.
try:
    buffer = buffer
except NameError:
    if not pycompat.ispy3:
        # py2 without the builtin (unusual): emulate with plain slicing,
        # which copies but has the same observable result
        def buffer(sliceable, offset=0, length=None):
            if length is not None:
                return sliceable[offset:offset + length]
            return sliceable[offset:]
    else:
        # py3: memoryview gives a zero-copy window over the underlying bytes
        def buffer(sliceable, offset=0, length=None):
            if length is not None:
                return memoryview(sliceable)[offset:offset + length]
            return memoryview(sliceable)[offset:]
262 288
263 289 closefds = pycompat.osname == 'posix'
264 290
265 291 _chunksize = 4096
266 292
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input    # the raw, unbuffered pipe object
        self._buffer = []      # buffered chunks, oldest first
        self._eof = False      # True once os.read() returned no data
        self._lenbuf = 0       # total number of buffered bytes

    @property
    def hasbuffer(self):
        """True is any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        # delegate to the underlying pipe
        return self._input.closed

    def fileno(self):
        # expose the raw descriptor so callers can select()/poll() on it
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        """Return up to 'size' bytes, filling the buffer from the pipe."""
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        """Return one line, including the trailing newline when present."""
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        lfi = -1  # index of the first newline in the last buffered chunk
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all chunks so a single slice suffices
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        # read straight from the fd (not a buffered file object) so that
        # polling on the descriptor stays meaningful
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
360 386
def popen2(cmd, env=None, newlines=False):
    """Run *cmd* through a shell; return its (stdin, stdout) pipe pair.

    Setting bufsize to -1 lets the system decide the buffer size.
    The default for bufsize is 0, meaning unbuffered. This leads to
    poor performance on Mac OS X: http://bugs.python.org/issue4194
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
371 397
def popen3(cmd, env=None, newlines=False):
    """Like popen4(), but drop the process object from the result."""
    pipes = popen4(cmd, env, newlines)
    return pipes[0], pipes[1], pipes[2]
375 401
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    """Run *cmd* through a shell.

    Returns (stdin, stdout, stderr, process), all three streams piped.
    """
    proc = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
384 410
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # no generated version module (e.g. running from a raw checkout)
        return 'unknown'
    return __version__.version
392 418
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)

    >>> v = '3.9-rc'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc')

    >>> v = '3.9-rc+2-02a8fea4289b'
    >>> versiontuple(v, 2)
    (3, 9)
    >>> versiontuple(v, 3)
    (3, 9, None)
    >>> versiontuple(v, 4)
    (3, 9, None, 'rc+2-02a8fea4289b')
    """
    if not v:
        v = version()
    # Split the numeric part from the "extra" suffix on the first '+' or '-'.
    # Use a raw string so the backslash reaches the regexp engine: a plain
    # '\+' is an invalid string escape (SyntaxWarning on modern Python 3).
    # maxsplit is passed by keyword for the same forward-compat reason.
    parts = remod.split(r'[\+-]', v, maxsplit=1)
    if len(parts) == 1:
        vparts, extra = parts[0], None
    else:
        vparts, extra = parts

    vints = []
    for i in vparts.split('.'):
        try:
            vints.append(int(i))
        except ValueError:
            # stop at the first non-numeric component (e.g. 'rc')
            break
    # (3, 6) -> (3, 6, None)
    while len(vints) < 3:
        vints.append(None)

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
461 487
462 488 # used by parsedate
463 489 defaultdateformats = (
464 490 '%Y-%m-%dT%H:%M:%S', # the 'real' ISO8601
465 491 '%Y-%m-%dT%H:%M', # without seconds
466 492 '%Y-%m-%dT%H%M%S', # another awful but legal variant without :
467 493 '%Y-%m-%dT%H%M', # without seconds
468 494 '%Y-%m-%d %H:%M:%S', # our common legal variant
469 495 '%Y-%m-%d %H:%M', # without seconds
470 496 '%Y-%m-%d %H%M%S', # without :
471 497 '%Y-%m-%d %H%M', # without seconds
472 498 '%Y-%m-%d %I:%M:%S%p',
473 499 '%Y-%m-%d %H:%M',
474 500 '%Y-%m-%d %I:%M%p',
475 501 '%Y-%m-%d',
476 502 '%m-%d',
477 503 '%m/%d',
478 504 '%m/%d/%y',
479 505 '%m/%d/%Y',
480 506 '%a %b %d %H:%M:%S %Y',
481 507 '%a %b %d %I:%M:%S%p %Y',
482 508 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
483 509 '%b %d %H:%M:%S %Y',
484 510 '%b %d %I:%M:%S%p %Y',
485 511 '%b %d %H:%M:%S',
486 512 '%b %d %I:%M:%S%p',
487 513 '%b %d %H:%M',
488 514 '%b %d %I:%M%p',
489 515 '%b %d %Y',
490 516 '%b %d',
491 517 '%H:%M:%S',
492 518 '%I:%M:%S%p',
493 519 '%H:%M',
494 520 '%I:%M%p',
495 521 )
496 522
497 523 extendeddateformats = defaultdateformats + (
498 524 "%Y",
499 525 "%Y-%m",
500 526 "%b",
501 527 "%b %Y",
502 528 )
503 529
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    argcount = func.__code__.co_argcount
    if argcount == 0:
        memo = []
        def f():
            if not memo:
                memo.append(func())
            return memo[0]
        return f
    memo = {}
    if argcount == 1:
        # single-argument fast path: skip packing/unpacking a tuple
        def f(arg):
            if arg not in memo:
                memo[arg] = func(arg)
            return memo[arg]
    else:
        def f(*args):
            if args not in memo:
                memo[args] = func(*args)
            return memo[args]

    return f
529 555
class sortdict(dict):
    '''a simple sorted dictionary

    Keys iterate in insertion order; reassigning an existing key moves it
    to the end of that order.
    '''
    def __init__(self, data=None):
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        if key in self:
            # re-insertion moves the key to the end of the order
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        if isinstance(src, dict):
            # use items() rather than iteritems() so plain dicts keep
            # working when this module runs under Python 3 (identical
            # behavior on Python 2)
            src = src.items()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        try:
            self._list.remove(key)
        except ValueError:
            # missing key: let dict.pop apply the default or raise
            pass
        return dict.pop(self, key, *args, **kwargs)
    def keys(self):
        return self._list[:]
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        '''insert key at the given position in the order'''
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
    def __repr__(self):
        if not self:
            return '%s()' % self.__class__.__name__
        return '%s(%r)' % (self.__class__.__name__, self.items())
578 604
class _lrucachenode(object):
    """A node in a doubly linked list.

    Holds a reference to nodes on either side as well as a key-value
    pair for the dictionary entry.
    """
    # __slots__ keeps per-node memory small; caches allocate many nodes
    __slots__ = (u'next', u'prev', u'key', u'value')

    def __init__(self):
        self.next = None  # newer neighbor in the circular list
        self.prev = None  # older neighbor in the circular list

        # _notset (not None) marks an unused slot, so None stays a valid value
        self.key = _notset
        self.value = None

    def markempty(self):
        """Mark the node as emptied."""
        self.key = _notset
597 623
class lrucachedict(object):
    """Dict that caches most recent accesses and sets.

    The dict consists of an actual backing dict - indexed by original
    key - and a doubly linked circular list defining the order of entries in
    the cache.

    The head node is the newest entry in the cache. If the cache is full,
    we recycle head.prev and make it the new head. Cache accesses result in
    the node being moved to before the existing head and being marked as the
    new head node.
    """
    def __init__(self, max):
        # key -> _lrucachenode; recency lives in the linked list, not here
        self._cache = {}

        # start as a one-node circular list pointing at itself
        self._head = head = _lrucachenode()
        head.prev = head
        head.next = head
        self._size = 1          # allocated nodes (grows lazily up to max)
        self._capacity = max

    def __len__(self):
        return len(self._cache)

    def __contains__(self, k):
        return k in self._cache

    def __iter__(self):
        # We don't have to iterate in cache order, but why not.
        n = self._head
        for i in range(len(self._cache)):
            yield n.key
            n = n.next

    def __getitem__(self, k):
        node = self._cache[k]
        # a read counts as an access: refresh the entry's recency
        self._movetohead(node)
        return node.value

    def __setitem__(self, k, v):
        node = self._cache.get(k)
        # Replace existing value and mark as newest.
        if node is not None:
            node.value = v
            self._movetohead(node)
            return

        if self._size < self._capacity:
            # still growing: allocate a fresh node before the head
            node = self._addcapacity()
        else:
            # Grab the last/oldest item.
            node = self._head.prev

            # At capacity. Kill the old entry.
            if node.key is not _notset:
                del self._cache[node.key]

        node.key = k
        node.value = v
        self._cache[k] = node
        # And mark it as newest entry. No need to adjust order since it
        # is already self._head.prev.
        self._head = node

    def __delitem__(self, k):
        node = self._cache.pop(k)
        node.markempty()

        # Temporarily mark as newest item before re-adjusting head to make
        # this node the oldest item.
        self._movetohead(node)
        self._head = node.next

    # Additional dict methods.

    def get(self, k, default=None):
        try:
            return self._cache[k].value
        except KeyError:
            return default

    def clear(self):
        # walk the list emptying nodes; the node ring itself is reused
        n = self._head
        while n.key is not _notset:
            n.markempty()
            n = n.next

        self._cache.clear()

    def copy(self):
        result = lrucachedict(self._capacity)
        n = self._head.prev
        # Iterate in oldest-to-newest order, so the copy has the right ordering
        for i in range(len(self._cache)):
            result[n.key] = n.value
            n = n.prev
        return result

    def _movetohead(self, node):
        """Mark a node as the newest, making it the new head.

        When a node is accessed, it becomes the freshest entry in the LRU
        list, which is denoted by self._head.

        Visually, let's make ``N`` the new head node (* denotes head):

            previous/oldest <-> head <-> next/next newest

            ----<->--- A* ---<->-----
            |                       |
            E <-> D <-> N <-> C <-> B

        To:

            ----<->--- N* ---<->-----
            |                       |
            E <-> D <-> C <-> B <-> A

        This requires the following moves:

           C.next = D  (node.prev.next = node.next)
           D.prev = C  (node.next.prev = node.prev)
           E.next = N  (head.prev.next = node)
           N.prev = E  (node.prev = head.prev)
           N.next = A  (node.next = head)
           A.prev = N  (head.prev = node)
        """
        head = self._head
        # C.next = D
        node.prev.next = node.next
        # D.prev = C
        node.next.prev = node.prev
        # N.prev = E
        node.prev = head.prev
        # N.next = A
        # It is tempting to do just "head" here, however if node is
        # adjacent to head, this will do bad things.
        node.next = head.prev.next
        # E.next = N
        node.next.prev = node
        # A.prev = N
        node.prev.next = node

        self._head = node

    def _addcapacity(self):
        """Add a node to the circular linked list.

        The new node is inserted before the head node.
        """
        head = self._head
        node = _lrucachenode()
        head.prev.next = node
        node.prev = head.prev
        node.next = head
        head.prev = node
        self._size += 1
        return node
756 782
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    results = {}
    order = collections.deque()
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg in results:
                # refresh recency on a hit
                order.remove(arg)
            else:
                if len(results) > 20:
                    # evict the least recently used entry
                    del results[order.popleft()]
                results[arg] = func(arg)
            order.append(arg)
            return results[arg]
    else:
        def f(*args):
            if args in results:
                order.remove(args)
            else:
                if len(results) > 20:
                    del results[order.popleft()]
                results[args] = func(*args)
            order.append(args)
            return results[args]

    return f
783 809
class propertycache(object):
    """Descriptor caching the wrapped function's result on first access.

    The computed value is stored in the instance __dict__ under the
    function's name, so subsequent lookups bypass this descriptor entirely.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
796 822
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    out, err = proc.communicate(s)
    return out
803 829
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write the input to a temporary file for the command to consume
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, pycompat.sysstr('wb'))
        fp.write(s)
        fp.close()
        # only reserve the output name here; the command writes the content
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # OpenVMS encodes success in the low bit rather than a zero status
        if pycompat.sysplatform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        return readfile(outname)
    finally:
        # best-effort cleanup of both temporary files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
837 863
838 864 filtertable = {
839 865 'tempfile:': tempfilter,
840 866 'pipe:': pipefilter,
841 867 }
842 868
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # items() rather than iteritems() so the lookup keeps working when the
    # module runs under Python 3 (identical behavior on Python 2)
    for name, fn in filtertable.items():
        if cmd.startswith(name):
            # strip the 'tempfile:'/'pipe:' prefix before dispatching
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)
849 875
def binary(s):
    """return true if a string is binary data"""
    # empty/None input is never considered binary
    if not s:
        return False
    return '\0' in s
853 879
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    pending = []
    pendinglen = 0
    for chunk in source:
        pending.append(chunk)
        pendinglen += len(chunk)
        if pendinglen < min:
            continue
        # grow the threshold: double it, but jump straight to the size
        # class we just emitted if that is bigger, capping at max
        if min < max:
            min = min << 1
            nmin = 1 << log2(pendinglen)
            if nmin > min:
                min = nmin
            if min > max:
                min = max
        yield ''.join(pending)
        pendinglen = 0
        pending = []
    if pending:
        yield ''.join(pending)
884 910
885 911 Abort = error.Abort
886 912
def always(fn):
    """Predicate that matches any input (trivial matcher)."""
    return True

def never(fn):
    """Predicate that matches no input (trivial matcher)."""
    return False
892 918
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    if sys.version_info >= (2, 7):
        # nothing to work around on modern interpreters
        return func

    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the caller had GC on to begin with
            if wasenabled:
                gc.enable()
    return wrapper
916 942
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        # no starting point: n2 relative to root is already the answer
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists,
            # fall back to an absolute path under root
            return os.path.join(root, localpath(n2))
        # make n2 absolute so both sides share the same base
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return pycompat.ossep.join((['..'] * len(a)) + b) or '.'
942 968
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):      # new py2exe
        return True
    if safehasattr(sys, "importers"):   # old py2exe
        return True
    return imp.is_frozen(u"__main__")   # tools/freeze
952 978
953 979 # the location of data files matching the source code
954 980 if mainfrozen() and getattr(sys, 'frozen', None) != 'macosx_app':
955 981 # executable version (py2exe) doesn't support __file__
956 982 datapath = os.path.dirname(pycompat.sysexecutable)
957 983 else:
958 984 datapath = os.path.dirname(pycompat.fsencode(__file__))
959 985
960 986 i18n.setdatapath(datapath)
961 987
962 988 _hgexecutable = None
963 989
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    # the result is computed once and cached in the module-level
    # _hgexecutable via _sethgexecutable()
    if _hgexecutable is None:
        hg = encoding.environ.get('HG')
        mainmod = sys.modules[pycompat.sysstr('__main__')]
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            if getattr(sys, 'frozen', None) == 'macosx_app':
                # Env variable set by py2app
                _sethgexecutable(encoding.environ['EXECUTABLEPATH'])
            else:
                # frozen binary (e.g. py2exe): the executable is hg itself
                _sethgexecutable(pycompat.sysexecutable)
        elif (os.path.basename(
            pycompat.fsencode(getattr(mainmod, '__file__', ''))) == 'hg'):
            # the running script is itself named 'hg'
            _sethgexecutable(pycompat.fsencode(mainmod.__file__))
        else:
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
987 1013
def _sethgexecutable(path):
    """set location of the 'hg' executable (cached in _hgexecutable)"""
    global _hgexecutable
    _hgexecutable = path
992 1018
993 1019 def _isstdout(f):
994 1020 fileno = getattr(f, 'fileno', None)
995 1021 return fileno and fileno() == sys.__stdout__.fileno()
996 1022
def shellenviron(environ=None):
    """return environ with optional override, useful for shelling out"""
    def py2shell(val):
        'convert python object into string that is useful to shell'
        # None/False become '0', True becomes '1', everything else is
        # stringified as-is
        if val is None or val is False:
            return '0'
        elif val is True:
            return '1'
        else:
            return str(val)
    env = dict(encoding.environ)
    if environ:
        for key, val in environ.iteritems():
            env[key] = py2shell(val)
    env['HG'] = hgexecutable()
    return env
1011 1037
def system(cmd, environ=None, cwd=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.

    Returns the command's exit code (0 forced on OpenVMS "success" codes).
    '''
    try:
        # flush our own buffered stdout so child output interleaves sanely
        stdout.flush()
    except Exception:
        pass
    cmd = quotecommand(cmd)
    if pycompat.sysplatform == 'plan9' and (sys.version_info[0] == 2
                                            and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = shellenviron(environ)
        if out is None or _isstdout(out):
            # child inherits our stdout/stderr directly
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture combined stdout+stderr and funnel it into 'out'
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            for line in iter(proc.stdout.readline, ''):
                out.write(line)
            proc.wait()
            rc = proc.returncode
    if pycompat.sysplatform == 'OpenVMS' and rc & 1:
        # odd status values mean success on OpenVMS
        rc = 0
    return rc
1046 1072
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # A traceback of depth exactly one means the TypeError was
            # raised by the call itself (bad signature), not by code
            # running inside func -- report it as a SignatureError.
            frames = traceback.extract_tb(sys.exc_info()[2])
            if len(frames) != 1:
                raise
            raise error.SignatureError

    return check
1058 1084
# a whitelist of known filesystems where hardlink works reliably
_hardlinkfswhitelist = set([
    'btrfs',
    'ext2',
    'ext3',
    'ext4',
    'hfs',
    'jfs',
    'reiserfs',
    'tmpfs',
    'ufs',
    'xfs',
    'zfs',
])
1073 1099
def copyfile(src, dest, hardlink=False, copystat=False, checkambig=False):
    '''copy a file, preserving mode and optionally other stat info like
    atime/mtime

    checkambig argument is used with filestat, and is useful only if
    destination file is guarded by any lock (e.g. repo.lock or
    repo.wlock).

    copystat and checkambig should be exclusive.
    '''
    assert not (copystat and checkambig)
    oldstat = None
    if os.path.lexists(dest):
        if checkambig:
            # remember dest's stat so the copy's timestamp can be nudged
            # below if it would be indistinguishable from the old file
            oldstat = checkambig and filestat(dest)
        unlink(dest)
    if hardlink:
        # Hardlinks are problematic on CIFS (issue4546), do not allow hardlinks
        # unless we are confident that dest is on a whitelisted filesystem.
        try:
            fstype = getfstype(os.path.dirname(dest))
        except OSError:
            fstype = None
        if fstype not in _hardlinkfswhitelist:
            hardlink = False
    if hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        os.symlink(os.readlink(src), dest)
        # copytime is ignored for symlinks, but in general copytime isn't needed
        # for them anyway
    else:
        try:
            shutil.copyfile(src, dest)
            if copystat:
                # copystat also copies mode
                shutil.copystat(src, dest)
            else:
                shutil.copymode(src, dest)
                if oldstat and oldstat.stat:
                    newstat = filestat(dest)
                    if newstat.isambig(oldstat):
                        # stat of copied file is ambiguous to original one
                        advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                        os.utime(dest, (advanced, advanced))
        except shutil.Error as inst:
            raise Abort(str(inst))
1125 1151
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible.

    Returns a (hardlink, count) pair: whether hardlinking was (still)
    in effect at the end, and how many files were copied/linked.
    hardlink=None means "decide from whether src and dst share a device".
    """
    num = 0

    gettopic = lambda: hardlink and _('linking') or _('copying')

    if os.path.isdir(src):
        if hardlink is None:
            # hardlinks only work within one filesystem
            hardlink = (os.stat(src).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # offset child progress by the files already processed
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink is None:
            hardlink = (os.stat(os.path.dirname(src)).st_dev ==
                        os.stat(os.path.dirname(dst)).st_dev)
        topic = gettopic()

        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # once a link fails, stop trying for the rest of the tree
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
1165 1191
# names reserved by the Win32 API in any directory, with any extension
# (opening them addresses the device rather than a file)
_winreservednames = '''con prn aux nul
    com1 com2 com3 com4 com5 com6 com7 com8 com9
    lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
# characters that cannot appear in a Windows filename
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in pycompat.bytestr(n):
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                # control characters are invalid in Win32 filenames
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # trailing dot/space is silently stripped by Win32; '.' and '..'
        # themselves are allowed (both are substrings of '..')
        if t in '. ' and n not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
1216 1242
# pick the platform-appropriate filename validator and wall-clock timer;
# on Windows time.clock has better resolution than time.time
if pycompat.osname == 'nt':
    checkosfilename = checkwinfilename
    timer = time.clock
else:
    checkosfilename = platform.checkosfilename
    timer = time.time

# prefer the monotonic high-resolution counter where Python provides it
if safehasattr(time, "perf_counter"):
    timer = time.perf_counter
1226 1252
def makelock(info, pathname):
    """Create a lock at *pathname* whose content is *info*.

    Prefers a symlink pointing at *info*; if an existing lock is found
    (EEXIST) that error propagates. Platforms or filesystems without
    symlinks fall back to an exclusively-created regular file.
    """
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            # lock already held -- a genuine failure
            raise
        # any other OS-level problem: fall through to the file fallback
    except AttributeError: # no symlink in os
        pass

    flags = os.O_CREAT | os.O_WRONLY | os.O_EXCL
    ld = os.open(pathname, flags)
    os.write(ld, info)
    os.close(ld)
1239 1265
def readlock(pathname):
    """Return the info stored in the lock at *pathname*.

    Understands both forms written by makelock(): a symlink (the info is
    the link target) and the regular-file fallback (the info is the file
    content).
    """
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink; ENOSYS: symlinks unsupported --
        # fall through to reading a regular lock file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    try:
        return fp.read()
    finally:
        fp.close()
1252 1278
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no file descriptor available: stat by name instead
        return os.stat(fp.name)
    return os.fstat(fd)
1259 1285
# File system features

def fscasesensitive(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    curstat = os.lstat(path)
    dirname, base = os.path.split(path)
    folded = base.upper()
    if folded == base:
        folded = base.lower()
    if folded == base:
        # name has no case-foldable characters:
        # no evidence against case sensitivity
        return True
    probe = os.path.join(dirname, folded)
    try:
        if os.lstat(probe) == curstat:
            # the case-folded spelling resolves to the very same file,
            # so the filesystem ignores case
            return False
        return True
    except OSError:
        # folded spelling doesn't exist: case is significant
        return True
1284 1310
1285 1311 try:
1286 1312 import re2
1287 1313 _re2 = None
1288 1314 except ImportError:
1289 1315 _re2 = False
1290 1316
class _re(object):
    """Facade choosing between the optional re2 engine and stdlib re.

    The module-level _re2 flag is tri-state: None (not yet probed),
    True (re2 importable and working), False (unavailable/broken).
    """
    def _checkre2(self):
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes flags inline rather than as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 doesn't support; fall back
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
1333 1359
re = _re()

_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.

    Results of per-directory listings are memoized in _fspathcache and
    refreshed at most once per lookup when a component is not found.
    '''
    def _makefspathcacheentry(dir):
        # map normcased name -> on-disk spelling for one directory
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = pycompat.ossep
    if pycompat.osaltsep:
        seps = seps + pycompat.osaltsep
    # Protect backslashes. This gets silly very quickly.
    # str.replace returns a new string, so the result must be kept;
    # otherwise a backslash separator (Windows) would act as an escape
    # character inside the regex character classes below instead of
    # being matched as a separator.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(br'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1378 1404
def getfstype(dirpath):
    '''Get the filesystem type name from a directory (best-effort)

    Returns None if we are unsure. Raises OSError on ENOENT, EPERM, etc.
    '''
    # osutil only provides getfstype on some platforms/builds
    impl = getattr(osutil, 'getfstype', None)
    if impl is None:
        return None
    return impl(dirpath)
1385 1411
def checknlink(testfile):
    '''check whether hardlink count reporting works properly

    Creates and removes two sibling probe files next to testfile;
    returns False on any failure to create/link them.
    '''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        # cannot even create the probe file; clean up a partial one
        try:
            os.unlink(f1)
        except OSError:
            pass
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        # always remove both probe files, ignoring races
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass
1421 1447
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(pycompat.ossep):
        return True
    altsep = pycompat.osaltsep
    # altsep may be empty/None on platforms with a single separator;
    # returning it keeps the original falsy result in that case
    return altsep and path.endswith(altsep)
1426 1452
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(pycompat.ossep)
1434 1460
def gui():
    '''Are we running in a GUI?'''
    if pycompat.sysplatform != 'darwin':
        # Windows always has a GUI; elsewhere, require a display
        return pycompat.osname == "nt" or encoding.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in encoding.environ:
        # handle SSH access to a box where the user is logged in
        return False
    if getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    # pure build; use a safe default
    return True
1449 1475
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            if inst.errno == errno.ENOENT:
                # no original to copy from: the fresh empty file will do
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # on any failure, don't leave a half-written temp file behind
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
1488 1514
class filestat(object):
    """help to exactly detect change of a file

    'stat' attribute is result of 'os.stat()' if specified 'path'
    exists. Otherwise, it is None. This can avoid preparative
    'exists()' examination on client side of this class.
    """
    def __init__(self, path):
        try:
            self.stat = os.stat(path)
        except OSError as err:
            if err.errno != errno.ENOENT:
                raise
            # missing file is not an error; it just has no stat
            self.stat = None

    # keep identity-based hashing even though __eq__ is value-based
    __hash__ = object.__hash__

    def __eq__(self, old):
        try:
            # if ambiguity between stat of new and old file is
            # avoided, comparison of size, ctime and mtime is enough
            # to exactly detect change of a file regardless of platform
            return (self.stat.st_size == old.stat.st_size and
                    self.stat.st_ctime == old.stat.st_ctime and
                    self.stat.st_mtime == old.stat.st_mtime)
        except AttributeError:
            # either side has stat = None (missing file): never equal
            return False

    def isambig(self, old):
        """Examine whether new (= self) stat is ambiguous against old one

        "S[N]" below means stat of a file at N-th change:

        - S[n-1].ctime  < S[n].ctime: can detect change of a file
        - S[n-1].ctime == S[n].ctime
          - S[n-1].ctime  < S[n].mtime: means natural advancing (*1)
          - S[n-1].ctime == S[n].mtime: is ambiguous (*2)
          - S[n-1].ctime  > S[n].mtime: never occurs naturally (don't care)
        - S[n-1].ctime  > S[n].ctime: never occurs naturally (don't care)

        Case (*2) above means that a file was changed twice or more at
        same time in sec (= S[n-1].ctime), and comparison of timestamp
        is ambiguous.

        Base idea to avoid such ambiguity is "advance mtime 1 sec, if
        timestamp is ambiguous".

        But advancing mtime only in case (*2) doesn't work as
        expected, because naturally advanced S[n].mtime in case (*1)
        might be equal to manually advanced S[n-1 or earlier].mtime.

        Therefore, all "S[n-1].ctime == S[n].ctime" cases should be
        treated as ambiguous regardless of mtime, to avoid overlooking
        by confliction between such mtime.

        Advancing mtime "if isambig(oldstat)" ensures "S[n-1].mtime !=
        S[n].mtime", even if size of a file isn't changed.
        """
        try:
            return (self.stat.st_ctime == old.stat.st_ctime)
        except AttributeError:
            # one of the stats is None (missing file): not ambiguous
            return False

    def avoidambig(self, path, old):
        """Change file stat of specified path to avoid ambiguity

        'old' should be previous filestat of 'path'.

        This skips avoiding ambiguity, if a process doesn't have
        appropriate privileges for 'path'.
        """
        # bump mtime one second past the old one (31-bit wraparound)
        advanced = (old.stat.st_mtime + 1) & 0x7fffffff
        try:
            os.utime(path, (advanced, advanced))
        except OSError as inst:
            if inst.errno == errno.EPERM:
                # utime() on the file created by another user causes EPERM,
                # if a process doesn't have appropriate privileges
                return
            raise

    def __ne__(self, other):
        return not self == other
1572 1598
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.

    checkambig argument of constructor is used with filestat, and is
    useful only if target file is guarded by any lock (e.g. repo.lock
    or repo.wlock).
    '''
    def __init__(self, name, mode='w+b', createmode=None, checkambig=False):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)
        self._checkambig = checkambig

        # delegated methods
        self.read = self._fp.read
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        """Flush writes and atomically rename the temp copy into place."""
        if not self._fp.closed:
            self._fp.close()
            filename = localpath(self.__name)
            oldstat = self._checkambig and filestat(filename)
            if oldstat and oldstat.stat:
                rename(self._tempname, filename)
                newstat = filestat(filename)
                if newstat.isambig(oldstat):
                    # stat of changed file is ambiguous to original one
                    advanced = (oldstat.stat.st_mtime + 1) & 0x7fffffff
                    os.utime(filename, (advanced, advanced))
            else:
                rename(self._tempname, filename)

    def discard(self):
        """Throw away all writes: delete the temp copy without renaming."""
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()

    def __enter__(self):
        return self

    def __exit__(self, exctype, excvalue, traceback):
        # commit on clean exit, discard if an exception escaped
        if exctype is not None:
            self.discard()
        else:
            self.close()
1635 1661
def unlinkpath(f, ignoremissing=False):
    """unlink and remove the directory if it is empty"""
    # choose the forgiving or strict removal depending on ignoremissing
    remove = tryunlink if ignoremissing else unlink
    remove(f)
    # try removing directories that might now be empty
    try:
        removedirs(os.path.dirname(f))
    except OSError:
        pass
1647 1673
def tryunlink(f):
    """Attempt to remove a file, ignoring ENOENT errors."""
    try:
        unlink(f)
    except OSError as err:
        if err.errno == errno.ENOENT:
            # already gone: that's fine
            return
        raise
1655 1681
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            # already there: nothing to do (and no chmod below)
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # missing parent: create ancestors first, then retry
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root; give up
            raise
        makedirs(parent, mode, notindexed)
        try:
            makedir(name, notindexed)
        except OSError as err:
            # Catch EEXIST to handle races
            if err.errno == errno.EEXIST:
                return
            raise
    if mode is not None:
        os.chmod(name, mode)
1683 1709
def readfile(path):
    """Return the entire binary contents of *path*."""
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
1687 1713
def writefile(path, text):
    """Replace the contents of *path* with the bytes *text*."""
    fp = open(path, 'wb')
    try:
        fp.write(text)
    finally:
        fp.close()
1691 1717
def appendfile(path, text):
    """Append the bytes *text* to *path*, creating it if missing."""
    fp = open(path, 'ab')
    try:
        fp.write(text)
    finally:
        fp.close()
1695 1721
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # re-slice any chunk over 1MB into 256KB pieces so a single
            # huge chunk can't blow up buffering below
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # read offset into the chunk at the head of the queue
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            # NOTE(review): drains only the underlying iterator; any data
            # already queued by earlier partial reads is not included
            return ''.join(self.iter)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
1776 1802
def filechunkiter(f, size=131072, limit=None):
    """Create a generator that produces the data in the file size
    (default 131072) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # a zero-byte request (size 0, or limit exhausted) short-circuits
        # without calling read() at all
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1797 1823
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
    # the gap between the naive UTC and naive local renderings of the
    # same instant is exactly the local zone's offset from UTC
    utcnaive = datetime.datetime.utcfromtimestamp(timestamp)
    localnaive = datetime.datetime.fromtimestamp(timestamp)
    delta = utcnaive - localnaive
    tz = delta.days * 86400 + delta.seconds
    return timestamp, tz
1810 1836
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC.

    >>> datestr((0, 0))
    'Thu Jan 01 00:00:00 1970 +0000'
    >>> datestr((42, 0))
    'Thu Jan 01 00:00:42 1970 +0000'
    >>> datestr((-42, 0))
    'Wed Dec 31 23:59:18 1969 +0000'
    >>> datestr((0x7fffffff, 0))
    'Tue Jan 19 03:14:07 2038 +0000'
    >>> datestr((-0x80000000, 0))
    'Fri Dec 13 20:45:52 1901 +0000'
    """
    t, tz = date or makedate()
    if "%1" in format or "%2" in format or "%z" in format:
        # %1 expands to the sign+hours of the offset, %2 to the minutes;
        # %z is normalized to %1%2 first
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        q, r = divmod(minutes, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, q))
        format = format.replace("%2", "%02d" % r)
    d = t - tz
    # clamp to the 32-bit time_t range strftime can handle everywhere
    if d > 0x7fffffff:
        d = 0x7fffffff
    elif d < -0x80000000:
        d = -0x80000000
    # Never use time.gmtime() and datetime.datetime.fromtimestamp()
    # because they use the gmtime() system call which is buggy on Windows
    # for negative values.
    t = datetime.datetime(1970, 1, 1) + datetime.timedelta(seconds=d)
    s = encoding.strtolocal(t.strftime(encoding.strfromlocal(format)))
    return s
1846 1872
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date (YYYY-MM-DD)."""
    return datestr(date, format='%Y-%m-%d')
1850 1876
def parsetimezone(s):
    """find a trailing timezone, if any, in string, and return a
    (offset, remainder) pair

    The offset follows Mercurial's convention: seconds *west* of UTC
    (so "+0500" yields -18000). Returns (None, s) when no zone is found.
    """
    # named UTC aliases
    if s.endswith("GMT") or s.endswith("UTC"):
        return 0, s[:-3].rstrip()

    # Unix-style timezones [+-]hhmm
    if len(s) >= 5 and s[-5] in "+-" and s[-4:].isdigit():
        sign = 1 if s[-5] == "+" else -1
        hours = int(s[-4:-2])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-5].rstrip()

    # ISO8601 trailing Z
    if s.endswith("Z") and s[-2:-1].isdigit():
        return 0, s[:-1]

    # ISO8601-style [+-]hh:mm
    if (len(s) >= 6 and s[-6] in "+-" and s[-3] == ":" and
        s[-5:-3].isdigit() and s[-2:].isdigit()):
        sign = 1 if s[-6] == "+" else -1
        hours = int(s[-5:-3])
        minutes = int(s[-2:])
        return -sign * (hours * 60 + minutes) * 60, s[:-6]

    # no recognizable timezone suffix
    return None, s
1878 1904
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    if defaults is None:
        defaults = {}

    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string)

    # fill in elements the format does not cover, walking from the most
    # specific ("S" seconds) down to the least specific ("yY" year)
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"):
        if any(("%" + p) in format for p in part):
            # a specific time element was given; less specific elements
            # default to today's values from here on
            usenow = True
        else:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is not None:
        return localunixtime + offset, offset
    # no explicit timezone: interpret the string in the local timezone
    unixtime = int(time.mktime(timetuple))
    return unixtime, unixtime - localunixtime
1909 1935
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # the internal "unixtime offset" form, e.g. "1487876860 25200"
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if when < -0x80000000 or when > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1986 2012
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # bias unspecified fields toward the earliest possible moment
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # bias unspecified fields toward the latest possible moment;
        # try decreasing month lengths until one is valid for the month
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # '-N': within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # inclusive "DATE to DATE" range
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
2062 2088
def stringmatcher(pattern, casesensitive=True):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])
    >>> def itest(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern, casesensitive=False)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])

    case insensitive regex matches
    >>> itest('re:A.+b', 'nomatch', 'fooadef', 'fooadefBar')
    ('re', 'A.+b', [False, False, True])

    case insensitive literal matches
    >>> itest('ABCDEFG', 'abc', 'def', 'abcdefg')
    ('literal', 'ABCDEFG', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        flags = 0 if casesensitive else remod.I
        try:
            regex = remod.compile(pattern, flags)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search

    if pattern.startswith('literal:'):
        pattern = pattern[8:]

    if casesensitive:
        match = pattern.__eq__
    else:
        ipat = encoding.lower(pattern)
        match = lambda s: ipat == encoding.lower(s)
    return 'literal', pattern, match
2121 2147
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # drop everything from the first '@' on
    idx = user.find('@')
    if idx != -1:
        user = user[:idx]
    # keep only what follows a '<' (drops a leading real name)
    idx = user.find('<')
    if idx != -1:
        user = user[idx + 1:]
    # truncate at the first space, then at the first dot
    for sep in (' ', '.'):
        idx = user.find(sep)
        if idx != -1:
            user = user[:idx]
    return user
2137 2163
def emailuser(user):
    """Return the user portion of an email address."""
    # everything before the first '@' (the whole string if none)
    user = user.partition('@')[0]
    # find() returns -1 when '<' is absent, so the slice below starts
    # at 0 and keeps the whole string for bare addresses
    return user[user.find('<') + 1:]
2147 2173
def email(author):
    '''get email of author.'''
    # the address is the text between '<' and '>'; for bare addresses
    # find('<') yields -1, making the start index 0 (whole string)
    start = author.find('<') + 1
    end = author.find('>')
    if end < 0:
        return author[start:]
    return author[start:end]
2154 2180
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display.

    The trimming itself is delegated to encoding.trim, with '...' passed
    as the marker used when the text is actually shortened.
    """
    return encoding.trim(text, maxlength, ellipsis='...')
2158 2184
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity'''

    def render(count):
        magnitude = abs(count)
        for multiplier, divisor, fmt in unittable:
            if magnitude >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # nothing reached its threshold: fall back to the smallest unit
        return unittable[-1][2] % count

    return render
2169 2195
def processlinerange(fromline, toline):
    """Check that linerange <fromline>:<toline> makes sense and return a
    0-based range.

    >>> processlinerange(10, 20)
    (9, 20)
    >>> processlinerange(2, 1)
    Traceback (most recent call last):
        ...
    ParseError: line range must be positive
    >>> processlinerange(0, 5)
    Traceback (most recent call last):
        ...
    ParseError: fromline must be strictly positive
    """
    # order matters: an inverted range is reported before a bad fromline
    if toline < fromline:
        raise error.ParseError(_("line range must be positive"))
    if fromline < 1:
        raise error.ParseError(_("fromline must be strictly positive"))
    return fromline - 1, toline
2190 2216
# render a byte count using the largest unit that yields a mantissa >= 1,
# with precision decreasing as the magnitude grows
# (e.g. "1.23 MB", "12.3 MB", "123 MB")
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
2203 2229
# Matches a single EOL which can either be a CRLF where repeated CR
# are removed or a LF. We do not care about old Macintosh files, so a
# stray CR is an error. (The '\r*' collapses any run of CRs that
# immediately precedes the LF.)
_eolre = remod.compile(br'\r*\n')
2208 2234
def tolf(s):
    """Normalize line endings in s to LF (CRLF and CR-runs+LF become LF)."""
    return _eolre.sub('\n', s)
2211 2237
def tocrlf(s):
    """Normalize line endings in s to CRLF (each EOL becomes a single CRLF)."""
    return _eolre.sub('\r\n', s)
2214 2240
# pick converters between internal (LF) and platform-native line endings;
# on LF platforms both directions are the identity function
if pycompat.oslinesep == '\r\n':
    tonativeeol = tocrlf
    fromnativeeol = tolf
else:
    tonativeeol = pycompat.identity
    fromnativeeol = pycompat.identity
2221 2247
def escapestr(s):
    # codecs.escape_encode is the function underlying
    # s.encode('string_escape'); calling it directly keeps this working
    # on Python 3, where the 'string_escape' str codec does not exist
    encoded, length = codecs.escape_encode(s)
    return encoded
2226 2252
def unescapestr(s):
    # inverse of escapestr: decode backslash escape sequences
    decoded, length = codecs.escape_decode(s)
    return decoded
2229 2255
def uirepr(s):
    # repr() doubles every backslash; collapse the pairs again so
    # Windows paths stay readable in user-facing output
    text = repr(s)
    return text.replace('\\\\', '\\')
2233 2259
# factory that builds a width-aware TextWrapper subclass on first use and
# then rebinds this module-level name to the class itself (the original
# "delay import of textwrap" intent; textwrap is now imported at the top)
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            """Split ucstr so the head fits in space_left display columns
            (per-character widths come from encoding.ucolwidth)."""
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # cache the class: rebind the module-level name so subsequent calls
    # skip re-creating the class above
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
2337 2363
def wrap(line, width, initindent='', hangindent=''):
    """Wrap a local-encoding byte string to the given display width."""
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    # decode everything once with the same codec/error mode
    enc = pycompat.sysstr(encoding.encoding)
    encmode = pycompat.sysstr(encoding.encodingmode)
    line = line.decode(enc, encmode)
    initindent = initindent.decode(enc, encmode)
    hangindent = hangindent.decode(enc, encmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(enc)
2353 2379
if (pyplatform.python_implementation() == 'CPython' and
    sys.version_info < (3, 0)):
    # There is an issue in CPython that some IO methods do not handle EINTR
    # correctly. The following table shows what CPython version (and functions)
    # are affected (buggy: has the EINTR bug, okay: otherwise):
    #
    #                | < 2.7.4 | 2.7.4 to 2.7.12 | >= 3.0
    #   --------------------------------------------------
    #    fp.__iter__ | buggy   | buggy           | okay
    #    fp.read*    | buggy   | okay [1]        | okay
    #
    # [1]: fixed by changeset 67dc99a989cd in the cpython hg repo.
    #
    # Here we workaround the EINTR issue for fileobj.__iter__. Other methods
    # like "read*" are ignored for now, as Python < 2.7.4 is a minority.
    #
    # Although we can workaround the EINTR issue for fp.__iter__, it is slower:
    # "for x in fp" is 4x faster than "for x in iter(fp.readline, '')" in
    # CPython 2, because CPython 2 maintains an internal readahead buffer for
    # fp.__iter__ but not other fp.read* methods.
    #
    # On modern systems like Linux, the "read" syscall cannot be interrupted
    # when reading "fast" files like on-disk files. So the EINTR issue only
    # affects things like pipes, sockets, ttys etc. We treat "normal" (S_ISREG)
    # files approximately as "fast" files and use the fast (unsafe) code path,
    # to minimize the performance impact.
    if sys.version_info >= (2, 7, 4):
        # fp.readline deals with EINTR correctly, use it as a workaround.
        def _safeiterfile(fp):
            """Iterate fp's lines via readline, which handles EINTR."""
            return iter(fp.readline, '')
    else:
        # fp.read* are broken too, manually deal with EINTR in a stupid way.
        # note: this may block longer than necessary because of bufsize.
        def _safeiterfile(fp, bufsize=4096):
            """Yield lines from fp, retrying reads interrupted by EINTR."""
            fd = fp.fileno()
            line = ''
            while True:
                try:
                    buf = os.read(fd, bufsize)
                except OSError as ex:
                    # os.read only raises EINTR before any data is read
                    if ex.errno == errno.EINTR:
                        continue
                    else:
                        raise
                line += buf
                if '\n' in buf:
                    splitted = line.splitlines(True)
                    line = ''
                    for l in splitted:
                        if l[-1] == '\n':
                            yield l
                        else:
                            # keep the trailing partial line for next round
                            line = l
                if not buf:
                    break
            if line:
                yield line

    def iterfile(fp):
        """Return an iterator over fp that is safe against the EINTR bug.

        Regular on-disk files are treated as "fast" (see table above) and
        returned as-is; other file objects get the safe, slower wrapper.
        """
        fastpath = True
        if type(fp) is file:
            fastpath = stat.S_ISREG(os.fstat(fp.fileno()).st_mode)
        if fastpath:
            return fp
        else:
            return _safeiterfile(fp)
else:
    # PyPy and CPython 3 do not have the EINTR issue thus no workaround needed.
    def iterfile(fp):
        """Return fp unchanged (this runtime has no EINTR iteration bug)."""
        return fp
2425 2451
def iterlines(iterator):
    """Yield every line contained in the chunks produced by iterator."""
    for data in iterator:
        lines = data.splitlines()
        for line in lines:
            yield line
2430 2456
def expandpath(path):
    """Expand environment variables, then ~user constructs, in path."""
    expanded = os.path.expandvars(path)
    return os.path.expanduser(expanded)
2433 2459
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if not mainfrozen():
        return gethgcmd()
    if getattr(sys, 'frozen', None) == 'macosx_app':
        # Env variable set by py2app
        return [encoding.environ['EXECUTABLEPATH']]
    return [pycompat.sysexecutable]
2448 2474
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    # reap the child and record its (pid, status) pair
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    # SIGCHLD may be missing (e.g. on Windows); install the reaper only
    # when the platform provides the signal
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # child gone? recheck the condition once more before giving
            # up, in case it succeeded just before terminating
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
2483 2509
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # map the doubled prefix to a literal prefix character; use a
        # copy so the caller's mapping is not mutated as a side effect
        mapping = dict(mapping)
        mapping[prefix_char] = prefix_char
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
2508 2534
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: resolve it as a named network service
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
2525 2551
# recognized spellings for boolean configuration values
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    # lookup is case-insensitive; get() defaults to None for unknowns
    return _booleans.get(s.lower())
2536 2562
# table mapping every two-character hex string (all case combinations)
# to the character with that ordinal, for fast '%xx' unescaping
_hextochr = dict((a + b, chr(int(a + b, 16)))
                 for a in string.hexdigits for b in string.hexdigits)
2539 2565
2540 2566 class url(object):
2541 2567 r"""Reliable URL parser.
2542 2568
2543 2569 This parses URLs and provides attributes for the following
2544 2570 components:
2545 2571
2546 2572 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
2547 2573
2548 2574 Missing components are set to None. The only exception is
2549 2575 fragment, which is set to '' if present but empty.
2550 2576
2551 2577 If parsefragment is False, fragment is included in query. If
2552 2578 parsequery is False, query is included in path. If both are
2553 2579 False, both fragment and query are included in path.
2554 2580
2555 2581 See http://www.ietf.org/rfc/rfc2396.txt for more information.
2556 2582
2557 2583 Note that for backward compatibility reasons, bundle URLs do not
2558 2584 take host names. That means 'bundle://../' has a path of '../'.
2559 2585
2560 2586 Examples:
2561 2587
2562 2588 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
2563 2589 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
2564 2590 >>> url('ssh://[::1]:2200//home/joe/repo')
2565 2591 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
2566 2592 >>> url('file:///home/joe/repo')
2567 2593 <url scheme: 'file', path: '/home/joe/repo'>
2568 2594 >>> url('file:///c:/temp/foo/')
2569 2595 <url scheme: 'file', path: 'c:/temp/foo/'>
2570 2596 >>> url('bundle:foo')
2571 2597 <url scheme: 'bundle', path: 'foo'>
2572 2598 >>> url('bundle://../foo')
2573 2599 <url scheme: 'bundle', path: '../foo'>
2574 2600 >>> url(r'c:\foo\bar')
2575 2601 <url path: 'c:\\foo\\bar'>
2576 2602 >>> url(r'\\blah\blah\blah')
2577 2603 <url path: '\\\\blah\\blah\\blah'>
2578 2604 >>> url(r'\\blah\blah\blah#baz')
2579 2605 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
2580 2606 >>> url(r'file:///C:\users\me')
2581 2607 <url scheme: 'file', path: 'C:\\users\\me'>
2582 2608
2583 2609 Authentication credentials:
2584 2610
2585 2611 >>> url('ssh://joe:xyz@x/repo')
2586 2612 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
2587 2613 >>> url('ssh://joe@x/repo')
2588 2614 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
2589 2615
2590 2616 Query strings and fragments:
2591 2617
2592 2618 >>> url('http://host/a?b#c')
2593 2619 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
2594 2620 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
2595 2621 <url scheme: 'http', host: 'host', path: 'a?b#c'>
2596 2622
2597 2623 Empty path:
2598 2624
2599 2625 >>> url('')
2600 2626 <url path: ''>
2601 2627 >>> url('#a')
2602 2628 <url path: '', fragment: 'a'>
2603 2629 >>> url('http://host/')
2604 2630 <url scheme: 'http', host: 'host', path: ''>
2605 2631 >>> url('http://host/#a')
2606 2632 <url scheme: 'http', host: 'host', path: '', fragment: 'a'>
2607 2633
2608 2634 Only scheme:
2609 2635
2610 2636 >>> url('http:')
2611 2637 <url scheme: 'http'>
2612 2638 """
2613 2639
2614 2640 _safechars = "!~*'()+"
2615 2641 _safepchars = "/!~*'()+:\\"
2616 2642 _matchscheme = remod.compile('^[a-zA-Z0-9+.\\-]+:').match
2617 2643
2618 2644 def __init__(self, path, parsequery=True, parsefragment=True):
2619 2645 # We slowly chomp away at path until we have only the path left
2620 2646 self.scheme = self.user = self.passwd = self.host = None
2621 2647 self.port = self.path = self.query = self.fragment = None
2622 2648 self._localpath = True
2623 2649 self._hostport = ''
2624 2650 self._origpath = path
2625 2651
2626 2652 if parsefragment and '#' in path:
2627 2653 path, self.fragment = path.split('#', 1)
2628 2654
2629 2655 # special case for Windows drive letters and UNC paths
2630 2656 if hasdriveletter(path) or path.startswith('\\\\'):
2631 2657 self.path = path
2632 2658 return
2633 2659
2634 2660 # For compatibility reasons, we can't handle bundle paths as
2635 2661 # normal URLS
2636 2662 if path.startswith('bundle:'):
2637 2663 self.scheme = 'bundle'
2638 2664 path = path[7:]
2639 2665 if path.startswith('//'):
2640 2666 path = path[2:]
2641 2667 self.path = path
2642 2668 return
2643 2669
2644 2670 if self._matchscheme(path):
2645 2671 parts = path.split(':', 1)
2646 2672 if parts[0]:
2647 2673 self.scheme, path = parts
2648 2674 self._localpath = False
2649 2675
2650 2676 if not path:
2651 2677 path = None
2652 2678 if self._localpath:
2653 2679 self.path = ''
2654 2680 return
2655 2681 else:
2656 2682 if self._localpath:
2657 2683 self.path = path
2658 2684 return
2659 2685
2660 2686 if parsequery and '?' in path:
2661 2687 path, self.query = path.split('?', 1)
2662 2688 if not path:
2663 2689 path = None
2664 2690 if not self.query:
2665 2691 self.query = None
2666 2692
2667 2693 # // is required to specify a host/authority
2668 2694 if path and path.startswith('//'):
2669 2695 parts = path[2:].split('/', 1)
2670 2696 if len(parts) > 1:
2671 2697 self.host, path = parts
2672 2698 else:
2673 2699 self.host = parts[0]
2674 2700 path = None
2675 2701 if not self.host:
2676 2702 self.host = None
2677 2703 # path of file:///d is /d
2678 2704 # path of file:///d:/ is d:/, not /d:/
2679 2705 if path and not hasdriveletter(path):
2680 2706 path = '/' + path
2681 2707
2682 2708 if self.host and '@' in self.host:
2683 2709 self.user, self.host = self.host.rsplit('@', 1)
2684 2710 if ':' in self.user:
2685 2711 self.user, self.passwd = self.user.split(':', 1)
2686 2712 if not self.host:
2687 2713 self.host = None
2688 2714
2689 2715 # Don't split on colons in IPv6 addresses without ports
2690 2716 if (self.host and ':' in self.host and
2691 2717 not (self.host.startswith('[') and self.host.endswith(']'))):
2692 2718 self._hostport = self.host
2693 2719 self.host, self.port = self.host.rsplit(':', 1)
2694 2720 if not self.host:
2695 2721 self.host = None
2696 2722
2697 2723 if (self.host and self.scheme == 'file' and
2698 2724 self.host not in ('localhost', '127.0.0.1', '[::1]')):
2699 2725 raise Abort(_('file:// URLs can only refer to localhost'))
2700 2726
2701 2727 self.path = path
2702 2728
2703 2729 # leave the query string escaped
2704 2730 for a in ('user', 'passwd', 'host', 'port',
2705 2731 'path', 'fragment'):
2706 2732 v = getattr(self, a)
2707 2733 if v is not None:
2708 2734 setattr(self, a, urlreq.unquote(v))
2709 2735
2710 2736 def __repr__(self):
2711 2737 attrs = []
2712 2738 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
2713 2739 'query', 'fragment'):
2714 2740 v = getattr(self, a)
2715 2741 if v is not None:
2716 2742 attrs.append('%s: %r' % (a, v))
2717 2743 return '<url %s>' % ', '.join(attrs)
2718 2744
    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        # The actual assembly happens in __bytes__; this just converts the
        # resulting bytes to the platform's native str type.
        return encoding.strfromlocal(self.__bytes__())
2754 2780
    def __bytes__(self):
        """Reassemble the parsed URL components into a URL byte string.

        Components are re-quoted on output; the query string is kept in
        its stored (already escaped) form.  See __str__ for examples.
        """
        if self._localpath:
            # Local path (no scheme parsing was performed); only the
            # optional bundle: prefix and fragment are re-attached.
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urlreq.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urlreq.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # Bracketed IPv6 literals must not be percent-quoted.
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urlreq.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urlreq.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urlreq.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urlreq.quote(self.fragment, safe=self._safepchars)
        return s
2798 2824
    def authinfo(self):
        """Return (url_without_credentials, auth_descriptor).

        The descriptor is None when no user is set; otherwise it is the
        4-tuple expected by urllib2 password managers.
        """
        user, passwd = self.user, self.passwd
        try:
            # Temporarily strip the credentials so the serialized URL
            # does not contain them; restore them even on failure.
            self.user, self.passwd = None, None
            s = bytes(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))
2814 2840
2815 2841 def isabs(self):
2816 2842 if self.scheme and self.scheme != 'file':
2817 2843 return True # remote URL
2818 2844 if hasdriveletter(self.path):
2819 2845 return True # absolute for our purposes - can't be joined()
2820 2846 if self.path.startswith(r'\\'):
2821 2847 return True # Windows UNC path
2822 2848 if self.path.startswith('/'):
2823 2849 return True # POSIX-style
2824 2850 return False
2825 2851
2826 2852 def localpath(self):
2827 2853 if self.scheme == 'file' or self.scheme == 'bundle':
2828 2854 path = self.path or '/'
2829 2855 # For Windows, we need to promote hosts containing drive
2830 2856 # letters to paths with drive letters.
2831 2857 if hasdriveletter(self._hostport):
2832 2858 path = self._hostport + '/' + self.path
2833 2859 elif (self.host is not None and self.path
2834 2860 and not hasdriveletter(path)):
2835 2861 path = '/' + path
2836 2862 return path
2837 2863 return self._origpath
2838 2864
2839 2865 def islocal(self):
2840 2866 '''whether localpath will return something that posixfile can open'''
2841 2867 return (not self.scheme or self.scheme == 'file'
2842 2868 or self.scheme == 'bundle')
2843 2869
def hasscheme(path):
    """Return True if path parses as a URL with a scheme prefix."""
    u = url(path)
    return bool(u.scheme)
2846 2872
def hasdriveletter(path):
    """Return a truthy value if path starts with a Windows drive letter.

    A drive letter is one ASCII letter followed by ':', e.g. 'c:'.  Note
    that a falsy path ('' or None) is returned unchanged rather than as
    a bool, matching short-circuit semantics.
    """
    if not path:
        return path
    return path[1:2] == ':' and path[0:1].isalpha()
2849 2875
def urllocalpath(path):
    """Return the local filesystem path for path, ignoring query/fragment."""
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
2852 2878
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return bytes(parsed)
2859 2885
def removeauth(u):
    '''remove all authentication information from a url string'''
    u = url(u)
    u.user = u.passwd = None
    # Use bytes() for consistency with hidepassword() and url.__bytes__();
    # str() and bytes() are the same type on Python 2 but diverge on
    # Python 3, where the rest of this module works in bytes.
    return bytes(u)
2865 2891
# Render a duration (in seconds) for human consumption.  Each entry is
# (threshold, divisor, format); the table is interpreted by unitcountfn
# (defined earlier in this module), picking units from seconds down to
# nanoseconds.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
2881 2907
# Current indentation (in columns) for nested @timed reports; stored in a
# one-element list so the closure in timed() can mutate it in place.
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        start = timer()
        indent = 2
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            # Report even when func raises; outdent first so the report
            # lines up with this call's nesting level.
            elapsed = timer() - start
            _timenesting[0] -= indent
            stderr.write('%s%s: %s\n' %
                         (' ' * _timenesting[0], func.__name__,
                          timecount(elapsed)))
    return wrapper
2908 2934
# Recognized size suffixes and their byte multipliers.  Single-letter
# suffixes are listed first so that e.g. '2mb' falls through to 'mb'.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    t = s.strip().lower()
    try:
        for suffix, factor in _sizeunits:
            if not t.endswith(suffix):
                continue
            return int(float(t[:-len(suffix)]) * factor)
        return int(t)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2930 2956
class hooks(object):
    '''An ordered collection of hook functions extending one behavior.

    When the collection is called, hooks run in lexicographic order of
    the source names they were registered under, and the list of their
    return values is returned.'''

    def __init__(self):
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        self._hooks.sort(key=lambda entry: entry[0])
        return [fn(*args) for _source, fn in self._hooks]
2948 2974
def getstackframes(skip=0, line=' %-*s in %s\n', fileline='%s:%s', depth=0):
    '''Yields lines for a nicely formatted stacktrace.
    Skips the 'skip' last entries, then return the last 'depth' entries
    (a depth of 0 keeps all remaining entries).
    Each file+linenumber is formatted according to fileline.
    Each line is formatted according to line.
    If line is None, it yields:
      length of longest filepath+line number,
      filepath+linenumber,
      function

    Not to be used in production code but very convenient while developing.
    '''
    # Note: with the default depth=0 the slice [-0:] keeps the whole
    # list, so every remaining frame is shown.
    entries = [(fileline % (fn, ln), func)
               for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]
               ][-depth:]
    if entries:
        # Pad each location to the widest one so functions line up.
        fnmax = max(len(entry[0]) for entry in entries)
        for fnln, func in entries:
            if line is None:
                yield (fnmax, fnln, func)
            else:
                yield line % (fnmax, fnln, func)
2971 2997
def debugstacktrace(msg='stacktrace', skip=0,
                    f=stderr, otherf=stdout, depth=0):
    '''Write msg and a nicely formatted stacktrace to f (stderr).

    The 'skip' entries closest to the call are dropped and at most
    'depth' entries are shown (0 means all).  By default otherf (stdout)
    is flushed first so output ordering stays sensible.  It can be used
    everywhere and intentionally does not require a ui object.
    Not to be used in production code but very convenient while
    developing.
    '''
    if otherf:
        otherf.flush()
    f.write('%s at:\n' % msg.rstrip())
    for frameline in getstackframes(skip + 1, depth=depth):
        f.write(frameline)
    f.flush()
2986 3012
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # Maps directory name -> number of entries counted beneath it.
        self._dirs = {}
        addpath = self.addpath
        # A dirstate-style map exposes iteritems(); entries whose first
        # state byte equals 'skip' are excluded from the count.
        if safehasattr(map, 'iteritems') and skip is not None:
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            for f in map:
                addpath(f)

    def addpath(self, path):
        # Walk ancestors from deepest to shallowest; stop at the first
        # one already present, since its own ancestors were counted when
        # it was first added.
        dirs = self._dirs
        for base in finddirs(path):
            if base in dirs:
                dirs[base] += 1
                return
            dirs[base] = 1

    def delpath(self, path):
        # Mirror of addpath: decrement until a count would reach zero,
        # then remove that entry and stop.
        dirs = self._dirs
        for base in finddirs(path):
            if dirs[base] > 1:
                dirs[base] -= 1
                return
            del dirs[base]

    def __iter__(self):
        return iter(self._dirs)

    def __contains__(self, d):
        return d in self._dirs
3022 3048
# Prefer the C implementation of dirs from the parsers module when it is
# available.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
3025 3051
def finddirs(path):
    """Yield each ancestor directory of path, deepest first.

    'a/b/c' yields 'a/b' then 'a'; a path without '/' yields nothing.
    """
    components = path.split('/')
    for end in range(len(components) - 1, 0, -1):
        yield '/'.join(components[:end])
3031 3057
class ctxmanager(object):
    '''A context manager for use in 'with' blocks to allow multiple
    contexts to be entered at once. This is both safer and more
    flexible than contextlib.nested.

    Once Mercurial supports Python 2.7+, this will become mostly
    unnecessary.
    '''

    def __init__(self, *args):
        '''Accepts a list of no-argument functions that return context
        managers. These will be invoked at __call__ time.'''
        self._pending = args
        self._atexit = []

    def __enter__(self):
        return self

    def enter(self):
        '''Create and enter context managers in the order in which they were
        passed to the constructor.'''
        values = []
        for func in self._pending:
            obj = func()
            values.append(obj.__enter__())
            self._atexit.append(obj.__exit__)
        del self._pending
        return values

    def atexit(self, func, *args, **kwargs):
        '''Add a function to call when this context manager exits. The
        ordering of multiple atexit calls is unspecified, save that
        they will happen before any __exit__ functions.'''
        def wrapper(exc_type, exc_val, exc_tb):
            func(*args, **kwargs)
        self._atexit.append(wrapper)
        return func

    def __exit__(self, exc_type, exc_val, exc_tb):
        '''Context managers are exited in the reverse order from which
        they were created.'''
        received = exc_type is not None
        suppressed = False
        pending = None
        self._atexit.reverse()
        for exitfunc in self._atexit:
            try:
                if exitfunc(exc_type, exc_val, exc_tb):
                    # A truthy __exit__ suppresses the active exception;
                    # later exit functions see no exception.
                    suppressed = True
                    exc_type = None
                    exc_val = None
                    exc_tb = None
            except BaseException:
                # Remember the most recent failure (fixed: sys.exc_info()
                # was previously called twice here redundantly); it is
                # re-raised below after all exit functions have run.
                exc_type, exc_val, exc_tb = pending = sys.exc_info()
        del self._atexit
        if pending:
            raise exc_val
        return received and suppressed
3091 3117
# compression code

# Role identifiers used when querying wire-protocol compression support.
SERVERROLE = 'server'
CLIENTROLE = 'client'

# name: wire protocol identifier of an engine.
# serverpriority / clientpriority: advertisement ordering — higher is
# advertised first; see compressionengine.wireprotosupport().
compewireprotosupport = collections.namedtuple(u'compenginewireprotosupport',
                                               (u'name', u'serverpriority',
                                                u'clientpriority'))
3100 3126
class compressormanager(object):
    """Holds registrations of various compression engines.

    This class essentially abstracts the differences between compression
    engines to allow new compression formats to be added easily, possibly from
    extensions.

    Compressors are registered against the global instance by calling its
    ``register()`` method.
    """
    def __init__(self):
        # Engine name to engine instance.
        self._engines = {}
        # Bundle spec human name to engine name.
        self._bundlenames = {}
        # Internal bundle identifier to engine name.
        self._bundletypes = {}
        # Revlog header to engine name.
        self._revlogheaders = {}
        # Wire proto identifier to engine name.
        self._wiretypes = {}

    def __getitem__(self, key):
        # Look an engine up by its registered name.
        return self._engines[key]

    def __contains__(self, key):
        return key in self._engines

    def __iter__(self):
        return iter(self._engines.keys())

    def register(self, engine):
        """Register a compression engine with the manager.

        The argument must be a ``compressionengine`` instance.

        Aborts when any of the engine's identifiers (name, bundle name,
        bundle type, wire protocol name, revlog header) collides with a
        previously registered engine.
        """
        if not isinstance(engine, compressionengine):
            raise ValueError(_('argument must be a compressionengine'))

        name = engine.name()

        if name in self._engines:
            raise error.Abort(_('compression engine %s already registered') %
                              name)

        bundleinfo = engine.bundletype()
        if bundleinfo:
            bundlename, bundletype = bundleinfo

            if bundlename in self._bundlenames:
                raise error.Abort(_('bundle name %s already registered') %
                                  bundlename)
            if bundletype in self._bundletypes:
                raise error.Abort(_('bundle type %s already registered by %s') %
                                  (bundletype, self._bundletypes[bundletype]))

            # No external facing name declared.
            if bundlename:
                self._bundlenames[bundlename] = name

            self._bundletypes[bundletype] = name

        wiresupport = engine.wireprotosupport()
        if wiresupport:
            wiretype = wiresupport.name
            if wiretype in self._wiretypes:
                raise error.Abort(_('wire protocol compression %s already '
                                    'registered by %s') %
                                  (wiretype, self._wiretypes[wiretype]))

            self._wiretypes[wiretype] = name

        revlogheader = engine.revlogheader()
        if revlogheader and revlogheader in self._revlogheaders:
            raise error.Abort(_('revlog header %s already registered by %s') %
                              (revlogheader, self._revlogheaders[revlogheader]))

        if revlogheader:
            self._revlogheaders[revlogheader] = name

        self._engines[name] = engine

    @property
    def supportedbundlenames(self):
        # User-facing bundle spec compression names.
        return set(self._bundlenames.keys())

    @property
    def supportedbundletypes(self):
        # Internal identifiers stored inside bundle files.
        return set(self._bundletypes.keys())

    def forbundlename(self, bundlename):
        """Obtain a compression engine registered to a bundle name.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundlenames[bundlename]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forbundletype(self, bundletype):
        """Obtain a compression engine registered to a bundle type.

        Will raise KeyError if the bundle type isn't registered.

        Will abort if the engine is known but not available.
        """
        engine = self._engines[self._bundletypes[bundletype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def supportedwireengines(self, role, onlyavailable=True):
        """Obtain compression engines that support the wire protocol.

        Returns a list of engines in prioritized order, most desired first.

        If ``onlyavailable`` is set, filter out engines that can't be
        loaded.
        """
        assert role in (SERVERROLE, CLIENTROLE)

        attr = 'serverpriority' if role == SERVERROLE else 'clientpriority'

        engines = [self._engines[e] for e in self._wiretypes.values()]
        if onlyavailable:
            engines = [e for e in engines if e.available()]

        def getkey(e):
            # Sort first by priority, highest first. In case of tie, sort
            # alphabetically. This is arbitrary, but ensures output is
            # stable.
            w = e.wireprotosupport()
            return -1 * getattr(w, attr), w.name

        return list(sorted(engines, key=getkey))

    def forwiretype(self, wiretype):
        """Obtain the engine registered to a wire protocol identifier.

        Will raise KeyError if the identifier isn't registered; aborts
        if the engine is known but not available.
        """
        engine = self._engines[self._wiretypes[wiretype]]
        if not engine.available():
            raise error.Abort(_('compression engine %s could not be loaded') %
                              engine.name())
        return engine

    def forrevlogheader(self, header):
        """Obtain a compression engine registered to a revlog header.

        Will raise KeyError if the revlog header value isn't registered.
        """
        return self._engines[self._revlogheaders[header]]

# The global registry instance; engines below register against it.
compengines = compressormanager()
3256 3282
class compressionengine(object):
    """Base class for compression engines.

    Compression engines must implement the interface defined by this class.

    Instances are installed by passing them to
    ``compressormanager.register()``.
    """
    def name(self):
        """Returns the name of the compression engine.

        This is the key the engine is registered under.

        This method must be implemented.
        """
        raise NotImplementedError()

    def available(self):
        """Whether the compression engine is available.

        The intent of this method is to allow optional compression engines
        that may not be available in all installations (such as engines relying
        on C extensions that may not be present).
        """
        return True

    def bundletype(self):
        """Describes bundle identifiers for this engine.

        If this compression engine isn't supported for bundles, returns None.

        If this engine can be used for bundles, returns a 2-tuple of strings of
        the user-facing "bundle spec" compression name and an internal
        identifier used to denote the compression format within bundles. To
        exclude the name from external usage, set the first element to ``None``.

        If bundle compression is supported, the class must also implement
        ``compressstream`` and `decompressorreader`.

        The docstring of this method is used in the help system to tell users
        about this engine.
        """
        return None

    def wireprotosupport(self):
        """Declare support for this compression format on the wire protocol.

        If this compression engine isn't supported for compressing wire
        protocol payloads, returns None.

        Otherwise, returns ``compenginewireprotosupport`` with the following
        fields:

        * String format identifier
        * Integer priority for the server
        * Integer priority for the client

        The integer priorities are used to order the advertisement of format
        support by server and client. The highest integer is advertised
        first. Integers with non-positive values aren't advertised.

        The priority values are somewhat arbitrary and only used for default
        ordering. The relative order can be changed via config options.

        If wire protocol compression is supported, the class must also implement
        ``compressstream`` and ``decompressorreader``.
        """
        return None

    def revlogheader(self):
        """Header added to revlog chunks that identifies this engine.

        If this engine can be used to compress revlogs, this method should
        return the bytes used to identify chunks compressed with this engine.
        Else, the method should return ``None`` to indicate it does not
        participate in revlog compression.
        """
        return None

    def compressstream(self, it, opts=None):
        """Compress an iterator of chunks.

        The method receives an iterator (ideally a generator) of chunks of
        bytes to be compressed. It returns an iterator (ideally a generator)
        of bytes of chunks representing the compressed output.

        Optionally accepts an argument defining how to perform compression.
        Each engine treats this argument differently.
        """
        raise NotImplementedError()

    def decompressorreader(self, fh):
        """Perform decompression on a file object.

        Argument is an object with a ``read(size)`` method that returns
        compressed data. Return value is an object with a ``read(size)`` that
        returns uncompressed data.
        """
        raise NotImplementedError()

    def revlogcompressor(self, opts=None):
        """Obtain an object that can be used to compress revlog entries.

        The object has a ``compress(data)`` method that compresses binary
        data. This method returns compressed binary data or ``None`` if
        the data could not be compressed (too small, not compressible, etc).
        The returned data should have a header uniquely identifying this
        compression format so decompression can be routed to this engine.
        This header should be identified by the ``revlogheader()`` return
        value.

        The object has a ``decompress(data)`` method that decompresses
        data. The method will only be called if ``data`` begins with
        ``revlogheader()``. The method should return the raw, uncompressed
        data or raise a ``RevlogError``.

        The object is reusable but is not thread safe.
        """
        raise NotImplementedError()
3373 3399
class _zlibengine(compressionengine):
    def name(self):
        return 'zlib'

    def bundletype(self):
        """zlib compression using the DEFLATE algorithm.

        All Mercurial clients should support this format. The compression
        algorithm strikes a reasonable balance between compression ratio
        and size.
        """
        return 'gzip', 'GZ'

    def wireprotosupport(self):
        # Priority 20 for both server and client roles.
        return compewireprotosupport('zlib', 20, 20)

    def revlogheader(self):
        return 'x'

    def compressstream(self, it, opts=None):
        # 'level' follows zlib semantics; -1 is zlib's default level.
        opts = opts or {}

        z = zlib.compressobj(opts.get('level', -1))
        for chunk in it:
            data = z.compress(chunk)
            # Not all calls to compress emit data. It is cheaper to inspect
            # here than to feed empty chunks through generator.
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = zlib.decompressobj()
            for chunk in filechunkiter(fh):
                while chunk:
                    # Limit output size to limit memory.
                    yield d.decompress(chunk, 2 ** 18)
                    chunk = d.unconsumed_tail

        return chunkbuffer(gen())

    class zlibrevlogcompressor(object):
        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            if insize < 44:
                # Inputs under 44 bytes are never compressed.
                return None

            elif insize <= 1000000:
                compressed = zlib.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None

            # zlib makes an internal copy of the input buffer, doubling
            # memory usage for large inputs. So do streaming compression
            # on large inputs.
            else:
                z = zlib.compressobj()
                parts = []
                pos = 0
                while pos < insize:
                    pos2 = pos + 2**20
                    parts.append(z.compress(data[pos:pos2]))
                    pos = pos2
                parts.append(z.flush())

                # Only return the result if it actually saved space.
                if sum(map(len, parts)) < insize:
                    return ''.join(parts)
                return None

        def decompress(self, data):
            try:
                return zlib.decompress(data)
            except zlib.error as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        return self.zlibrevlogcompressor()

compengines.register(_zlibengine())
3460 3486
class _bz2engine(compressionengine):
    def name(self):
        return 'bz2'

    def bundletype(self):
        """An algorithm that produces smaller bundles than ``gzip``.

        All Mercurial clients should support this format.

        This engine will likely produce smaller bundles than ``gzip`` but
        will be significantly slower, both during compression and
        decompression.

        If available, the ``zstd`` engine can yield similar or better
        compression at much higher speeds.
        """
        return 'bzip2', 'BZ'

    # We declare a protocol name but don't advertise by default because
    # it is slow.
    def wireprotosupport(self):
        return compewireprotosupport('bzip2', 0, 0)

    def compressstream(self, it, opts=None):
        # 'level' follows BZ2Compressor semantics; 9 is its maximum.
        opts = opts or {}
        z = bz2.BZ2Compressor(opts.get('level', 9))
        for chunk in it:
            data = z.compress(chunk)
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        def gen():
            d = bz2.BZ2Decompressor()
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_bz2engine())
3503 3529
class _truncatedbz2engine(compressionengine):
    def name(self):
        return 'bz2truncated'

    def bundletype(self):
        # The None bundle name keeps this engine internal-only: it has no
        # user-facing bundle spec name (see compressormanager.register).
        return None, '_truncatedBZ'

    # We don't implement compressstream because it is hackily handled elsewhere.

    def decompressorreader(self, fh):
        def gen():
            # The input stream doesn't have the 'BZ' header. So add it back.
            d = bz2.BZ2Decompressor()
            d.decompress('BZ')
            for chunk in filechunkiter(fh):
                yield d.decompress(chunk)

        return chunkbuffer(gen())

compengines.register(_truncatedbz2engine())
3524 3550
class _noopengine(compressionengine):
    def name(self):
        return 'none'

    def bundletype(self):
        """No compression is performed.

        Use this compression engine to explicitly disable compression.
        """
        return 'none', 'UN'

    # Clients always support uncompressed payloads. Servers don't because
    # unless you are on a fast network, uncompressed payloads can easily
    # saturate your network pipe.
    def wireprotosupport(self):
        return compewireprotosupport('none', 0, 10)

    # We don't implement revlogheader because it is handled specially
    # in the revlog class.

    def compressstream(self, it, opts=None):
        # Pass the chunks through untouched.
        return it

    def decompressorreader(self, fh):
        return fh

    class nooprevlogcompressor(object):
        def compress(self, data):
            # None signals the data is not to be stored compressed.
            return None

    def revlogcompressor(self, opts=None):
        return self.nooprevlogcompressor()

compengines.register(_noopengine())
3559 3585
class _zstdengine(compressionengine):
    def name(self):
        return 'zstd'

    @propertycache
    def _module(self):
        # Not all installs have the zstd module available. So defer importing
        # until first access.
        try:
            from . import zstd
            # Force delayed import.
            zstd.__version__
            return zstd
        except ImportError:
            return None

    def available(self):
        return bool(self._module)

    def bundletype(self):
        """A modern compression algorithm that is fast and highly flexible.

        Only supported by Mercurial 4.1 and newer clients.

        With the default settings, zstd compression is both faster and yields
        better compression than ``gzip``. It also frequently yields better
        compression than ``bzip2`` while operating at much higher speeds.

        If this engine is available and backwards compatibility is not a
        concern, it is likely the best available engine.
        """
        return 'zstd', 'ZS'

    def wireprotosupport(self):
        # Highest default priority of the registered engines (50/50).
        return compewireprotosupport('zstd', 50, 50)

    def revlogheader(self):
        return '\x28'

    def compressstream(self, it, opts=None):
        opts = opts or {}
        # zstd level 3 is almost always significantly faster than zlib
        # while providing no worse compression. It strikes a good balance
        # between speed and compression.
        level = opts.get('level', 3)

        zstd = self._module
        z = zstd.ZstdCompressor(level=level).compressobj()
        for chunk in it:
            data = z.compress(chunk)
            if data:
                yield data

        yield z.flush()

    def decompressorreader(self, fh):
        zstd = self._module
        dctx = zstd.ZstdDecompressor()
        return chunkbuffer(dctx.read_from(fh))

    class zstdrevlogcompressor(object):
        def __init__(self, zstd, level=3):
            # Writing the content size adds a few bytes to the output. However,
            # it allows decompression to be more optimal since we can
            # pre-allocate a buffer to hold the result.
            self._cctx = zstd.ZstdCompressor(level=level,
                                             write_content_size=True)
            self._dctx = zstd.ZstdDecompressor()
            self._compinsize = zstd.COMPRESSION_RECOMMENDED_INPUT_SIZE
            self._decompinsize = zstd.DECOMPRESSION_RECOMMENDED_INPUT_SIZE

        def compress(self, data):
            insize = len(data)
            # Caller handles empty input case.
            assert insize > 0

            if insize < 50:
                # Inputs under 50 bytes are never compressed.
                return None

            elif insize <= 1000000:
                compressed = self._cctx.compress(data)
                if len(compressed) < insize:
                    return compressed
                return None
            else:
                # Stream large inputs in chunks (mirrors _zlibengine's
                # approach for bounding memory use).
                z = self._cctx.compressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._compinsize
                    chunk = z.compress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                chunks.append(z.flush())

                # Only return the result if it actually saved space.
                if sum(map(len, chunks)) < insize:
                    return ''.join(chunks)
                return None

        def decompress(self, data):
            insize = len(data)

            try:
                # This was measured to be faster than other streaming
                # decompressors.
                dobj = self._dctx.decompressobj()
                chunks = []
                pos = 0
                while pos < insize:
                    pos2 = pos + self._decompinsize
                    chunk = dobj.decompress(data[pos:pos2])
                    if chunk:
                        chunks.append(chunk)
                    pos = pos2
                # Frame should be exhausted, so no finish() API.

                return ''.join(chunks)
            except Exception as e:
                raise error.RevlogError(_('revlog decompress error: %s') %
                                        str(e))

    def revlogcompressor(self, opts=None):
        opts = opts or {}
        return self.zstdrevlogcompressor(self._module,
                                         level=opts.get('level', 3))

compengines.register(_zstdengine())
3688 3714
def bundlecompressiontopics():
    """Obtains a list of available bundle compressions for use in help."""
    # help.makeitemsdocs() expects a dict of names to items with a .__doc__.

    # We need to format the docstring. So use a dummy object/type to hold it
    # rather than mutating the original.
    class docobject(object):
        pass

    items = {}
    for name in compengines:
        engine = compengines[name]
        if not engine.available():
            continue

        bundleinfo = engine.bundletype()
        # Skip engines without a user-facing bundle spec name.
        if not bundleinfo or not bundleinfo[0]:
            continue

        humanname = bundleinfo[0]
        holder = docobject()
        holder.__doc__ = pycompat.sysstr('``%s``\n %s') % (
            humanname, engine.bundletype.__doc__)
        items[humanname] = holder

    return items
3718 3744
# convenient shortcut: 'dst' dumps a stack trace while debugging
dst = debugstacktrace
@@ -1,2664 +1,2665 b''
1 1 #!/usr/bin/env python
2 2 #
3 3 # run-tests.py - Run a set of tests on Mercurial
4 4 #
5 5 # Copyright 2006 Matt Mackall <mpm@selenic.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 # Modifying this script is tricky because it has many modes:
11 11 # - serial (default) vs parallel (-jN, N > 1)
12 12 # - no coverage (default) vs coverage (-c, -C, -s)
13 13 # - temp install (default) vs specific hg script (--with-hg, --local)
14 14 # - tests are a mix of shell scripts and Python scripts
15 15 #
16 16 # If you change this script, it is recommended that you ensure you
17 17 # haven't broken it by running it in various modes with a representative
18 18 # sample of test scripts. For example:
19 19 #
20 20 # 1) serial, no coverage, temp install:
21 21 # ./run-tests.py test-s*
22 22 # 2) serial, no coverage, local hg:
23 23 # ./run-tests.py --local test-s*
24 24 # 3) serial, coverage, temp install:
25 25 # ./run-tests.py -c test-s*
26 26 # 4) serial, coverage, local hg:
27 27 # ./run-tests.py -c --local test-s* # unsupported
28 28 # 5) parallel, no coverage, temp install:
29 29 # ./run-tests.py -j2 test-s*
30 30 # 6) parallel, no coverage, local hg:
31 31 # ./run-tests.py -j2 --local test-s*
32 32 # 7) parallel, coverage, temp install:
33 33 # ./run-tests.py -j2 -c test-s* # currently broken
34 34 # 8) parallel, coverage, local install:
35 35 # ./run-tests.py -j2 -c --local test-s* # unsupported (and broken)
36 36 # 9) parallel, custom tmp dir:
37 37 # ./run-tests.py -j2 --tmpdir /tmp/myhgtests
38 38 # 10) parallel, pure, tests that call run-tests:
39 39 # ./run-tests.py --pure `grep -l run-tests.py *.t`
40 40 #
41 41 # (You could use any subset of the tests: test-s* happens to match
42 42 # enough that it's worth doing parallel runs, few enough that it
43 43 # completes fairly quickly, includes both shell and Python scripts, and
44 44 # includes some scripts that run daemon processes.)
45 45
46 46 from __future__ import absolute_import, print_function
47 47
48 48 import difflib
49 49 import distutils.version as version
50 50 import errno
51 51 import json
52 52 import optparse
53 53 import os
54 54 import random
55 55 import re
56 56 import shutil
57 57 import signal
58 58 import socket
59 59 import subprocess
60 60 import sys
61 61 try:
62 62 import sysconfig
63 63 except ImportError:
64 64 # sysconfig doesn't exist in Python 2.6
65 65 sysconfig = None
66 66 import tempfile
67 67 import threading
68 68 import time
69 69 import unittest
70 70 import xml.dom.minidom as minidom
71 71
72 72 try:
73 73 import Queue as queue
74 74 except ImportError:
75 75 import queue
76 76
# With RTUNICODEPEDANTRY set, force the "undefined" default codec so any
# implicit bytes<->unicode conversion raises instead of silently succeeding.
if os.environ.get('RTUNICODEPEDANTRY', False):
    try:
        reload(sys)
        sys.setdefaultencoding("undefined")
    except NameError:
        # Python 3: reload()/setdefaultencoding do not exist; nothing to do.
        pass
83 83
# os.environb (bytes environment) only exists on some platforms/versions;
# fall back to the plain str environment when it is missing.
osenvironb = getattr(os, 'environb', os.environ)
# Serializes subprocess.Popen calls made from worker threads (see Popen4).
processlock = threading.Lock()
86 86
if sys.version_info > (3, 5, 0):
    PYTHON3 = True
    xrange = range # we use xrange in one place, and we'd rather not use range
    def _bytespath(p):
        """Encode a native (unicode) path string to utf-8 bytes."""
        return p.encode('utf-8')

    def _strpath(p):
        """Decode a bytes path back to the native (unicode) string type."""
        return p.decode('utf-8')

elif sys.version_info >= (3, 0, 0):
    # Python 3.0-3.4 are explicitly unsupported: bail out early.
    print('%s is only supported on Python 3.5+ and 2.6-2.7, not %s' %
          (sys.argv[0], '.'.join(str(v) for v in sys.version_info[:3])))
    sys.exit(70) # EX_SOFTWARE from `man 3 sysexit`
else:
    PYTHON3 = False

    # In python 2.x, path operations are generally done using
    # bytestrings by default, so we don't have to do any extra
    # fiddling there. We define the wrapper functions anyway just to
    # help keep code consistent between platforms.
    def _bytespath(p):
        return p

    _strpath = _bytespath
111 111
# For Windows support: os.WIFEXITED does not exist there, so default to a
# callable that always reports "did not exit normally".
wifexited = getattr(os, "WIFEXITED", lambda x: False)
114 114
115 115 # Whether to use IPv6
def checksocketfamily(name, port=20058):
    """return true if we can listen on localhost using family=name

    name should be either 'AF_INET', or 'AF_INET6'.
    port being used is okay - EADDRINUSE is considered as successful.
    """
    family = getattr(socket, name, None)
    if family is None:
        # This Python build/platform does not know the requested family.
        return False
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        try:
            s.bind(('localhost', port))
        finally:
            # Always release the fd; the previous code leaked the socket
            # whenever bind() raised.
            s.close()
        return True
    except socket.error as exc:
        if exc.errno == errno.EADDRINUSE:
            # Someone else is already listening there: the family works.
            return True
        elif exc.errno in (errno.EADDRNOTAVAIL, errno.EPROTONOSUPPORT):
            return False
        else:
            raise
    # NOTE: the old trailing "else: return False" was unreachable (the try
    # block always returned) and has been dropped.
139 139
# useipv6 will be set by parseargs; None means "not decided yet".
useipv6 = None
142 142
def checkportisavailable(port):
    """return true if a port seems free to bind on localhost"""
    # Family follows the module-level useipv6 flag set by parseargs().
    if useipv6:
        family = socket.AF_INET6
    else:
        family = socket.AF_INET
    try:
        s = socket.socket(family, socket.SOCK_STREAM)
        try:
            s.bind(('localhost', port))
        finally:
            # Always release the fd; the previous code leaked the socket
            # whenever bind() raised.
            s.close()
        return True
    except socket.error as exc:
        # These errors simply mean "not available"; anything else is a
        # real problem and must propagate.
        if exc.errno not in (errno.EADDRINUSE, errno.EADDRNOTAVAIL,
                             errno.EPROTONOSUPPORT):
            raise
        return False
159 159
# Closing inherited fds in the child is only safe/meaningful on POSIX.
closefds = os.name == 'posix'
def Popen4(cmd, wd, timeout, env=None):
    """Spawn cmd through the shell in directory wd, popen2.Popen4-style.

    stdout and stderr are merged into one pipe. If timeout is non-zero,
    a watchdog thread terminates the process after roughly that many
    seconds and sets p.timeout = True.
    """
    # Popen creation is serialized across worker threads via processlock.
    processlock.acquire()
    p = subprocess.Popen(cmd, shell=True, bufsize=-1, cwd=wd, env=env,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    processlock.release()

    # Aliases matching the legacy popen2.Popen4 attribute names.
    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr

    p.timeout = False
    if timeout:
        def t():
            # Poll until the deadline or until someone records a
            # returncode on the Popen object, then kill if still running.
            start = time.time()
            while time.time() - start < timeout and p.returncode is None:
                time.sleep(.1)
            p.timeout = True
            if p.returncode is None:
                terminate(p)
        threading.Thread(target=t).start()

    return p
185 185
# Path of the running interpreter, with backslashes normalized to '/'.
PYTHON = _bytespath(sys.executable.replace('\\', '/'))
IMPL_PATH = b'PYTHONPATH'
if 'java' in sys.platform:
    IMPL_PATH = b'JYTHONPATH'

# Option defaults, overridable via the environment: name -> (envvar,
# fallback). getparser() coerces the env value to the fallback's type.
defaults = {
    'jobs': ('HGTEST_JOBS', 1),
    'timeout': ('HGTEST_TIMEOUT', 180),
    'slowtimeout': ('HGTEST_SLOWTIMEOUT', 500),
    'port': ('HGTEST_PORT', 20059),
    'shell': ('HGTEST_SHELL', 'sh'),
}
198 198
def canonpath(path):
    """Expand '~' in path and resolve it to a canonical absolute path."""
    expanded = os.path.expanduser(path)
    return os.path.realpath(expanded)
201 201
def parselistfiles(files, listtype, warn=True):
    """Parse blacklist/whitelist files into {test name: list file name}.

    Each listed file holds one test name per line; anything after '#' is
    a comment. Missing files are skipped (with a warning when warn is
    True); other IOErrors propagate. Returns a dict mapping each test
    name (bytes) to the name of the list file it came from.
    """
    entries = dict()
    for filename in files:
        try:
            path = os.path.expanduser(os.path.expandvars(filename))
            f = open(path, "rb")
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            if warn:
                print("warning: no such %s file: %s" % (listtype, filename))
            continue

        # 'with' guarantees the file is closed even if reading raises;
        # the previous code leaked the fd on error.
        with f:
            for line in f.readlines():
                line = line.split(b'#', 1)[0].strip()
                if line:
                    entries[line] = filename
    return entries
222 222
def getparser():
    """Obtain the OptionParser used by the CLI.

    Environment overrides from the module-level 'defaults' table are
    folded in as parser defaults (note: 'defaults' is mutated in place).
    """
    parser = optparse.OptionParser("%prog [options] [tests]")

    # keep these sorted
    parser.add_option("--blacklist", action="append",
        help="skip tests listed in the specified blacklist file")
    parser.add_option("--whitelist", action="append",
        help="always run tests listed in the specified whitelist file")
    parser.add_option("--changed", type="string",
        help="run tests that are changed in parent rev or working directory")
    parser.add_option("-C", "--annotate", action="store_true",
        help="output files annotated with coverage")
    parser.add_option("-c", "--cover", action="store_true",
        help="print a test coverage report")
    parser.add_option("-d", "--debug", action="store_true",
        help="debug mode: write output of test scripts to console"
             " rather than capturing and diffing it (disables timeout)")
    parser.add_option("-f", "--first", action="store_true",
        help="exit on the first test failure")
    parser.add_option("-H", "--htmlcov", action="store_true",
        help="create an HTML report of the coverage of the files")
    parser.add_option("-i", "--interactive", action="store_true",
        help="prompt to accept changed output")
    parser.add_option("-j", "--jobs", type="int",
        help="number of jobs to run in parallel"
             " (default: $%s or %d)" % defaults['jobs'])
    parser.add_option("--keep-tmpdir", action="store_true",
        help="keep temporary directory after running tests")
    parser.add_option("-k", "--keywords",
        help="run tests matching keywords")
    parser.add_option("-l", "--local", action="store_true",
        help="shortcut for --with-hg=<testdir>/../hg, "
             "and --with-chg=<testdir>/../contrib/chg/chg if --chg is set")
    parser.add_option("--loop", action="store_true",
        help="loop tests repeatedly")
    parser.add_option("--runs-per-test", type="int", dest="runs_per_test",
        help="run each test N times (default=1)", default=1)
    parser.add_option("-n", "--nodiff", action="store_true",
        help="skip showing test changes")
    parser.add_option("-p", "--port", type="int",
        help="port on which servers should listen"
             " (default: $%s or %d)" % defaults['port'])
    parser.add_option("--compiler", type="string",
        help="compiler to build with")
    parser.add_option("--pure", action="store_true",
        help="use pure Python code instead of C extensions")
    parser.add_option("-R", "--restart", action="store_true",
        help="restart at last error")
    parser.add_option("-r", "--retest", action="store_true",
        help="retest failed tests")
    parser.add_option("-S", "--noskips", action="store_true",
        help="don't report skip tests verbosely")
    parser.add_option("--shell", type="string",
        help="shell to use (default: $%s or %s)" % defaults['shell'])
    parser.add_option("-t", "--timeout", type="int",
        help="kill errant tests after TIMEOUT seconds"
             " (default: $%s or %d)" % defaults['timeout'])
    parser.add_option("--slowtimeout", type="int",
        help="kill errant slow tests after SLOWTIMEOUT seconds"
             " (default: $%s or %d)" % defaults['slowtimeout'])
    parser.add_option("--time", action="store_true",
        help="time how long each test takes")
    parser.add_option("--json", action="store_true",
        help="store test result data in 'report.json' file")
    parser.add_option("--tmpdir", type="string",
        help="run tests in the given temporary directory"
             " (implies --keep-tmpdir)")
    parser.add_option("-v", "--verbose", action="store_true",
        help="output verbose messages")
    parser.add_option("--xunit", type="string",
        help="record xunit results at specified path")
    parser.add_option("--view", type="string",
        help="external diff viewer")
    parser.add_option("--with-hg", type="string",
        metavar="HG",
        help="test using specified hg script rather than a "
             "temporary installation")
    parser.add_option("--chg", action="store_true",
        help="install and use chg wrapper in place of hg")
    parser.add_option("--with-chg", metavar="CHG",
        help="use specified chg wrapper in place of hg")
    parser.add_option("--ipv6", action="store_true",
        help="prefer IPv6 to IPv4 for network related tests")
    parser.add_option("-3", "--py3k-warnings", action="store_true",
        help="enable Py3k warnings on Python 2.6+")
    # This option should be deleted once test-check-py3-compat.t and other
    # Python 3 tests run with Python 3.
    parser.add_option("--with-python3", metavar="PYTHON3",
        help="Python 3 interpreter (if running under Python 2)"
             " (TEMPORARY)")
    parser.add_option('--extra-config-opt', action="append",
        help='set the given config opt in the test hgrc')
    parser.add_option('--random', action="store_true",
        help='run tests in random order')
    parser.add_option('--profile-runner', action='store_true',
        help='run statprof on run-tests')
    parser.add_option('--allow-slow-tests', action='store_true',
        help='allow extremely slow tests')
    parser.add_option('--showchannels', action='store_true',
        help='show scheduling channels')
    parser.add_option('--known-good-rev', type="string",
        metavar="known_good_rev",
        help=("Automatically bisect any failures using this "
              "revision as a known-good revision."))

    # Fold environment overrides into the hard-coded defaults, coercing
    # each env value to the type of its fallback, then install them.
    for option, (envvar, default) in defaults.items():
        defaults[option] = type(default)(os.environ.get(envvar, default))
    parser.set_defaults(**defaults)

    return parser
334 334
def parseargs(args, parser):
    """Parse arguments with our OptionParser and validate results.

    Also mutates the module globals 'useipv6' and 'verbose' as side
    effects, and calls parser.error() (which exits) on invalid input.
    """
    (options, args) = parser.parse_args(args)

    # jython is always pure
    if 'java' in sys.platform or '__pypy__' in sys.modules:
        options.pure = True

    if options.with_hg:
        options.with_hg = canonpath(_bytespath(options.with_hg))
        if not (os.path.isfile(options.with_hg) and
                os.access(options.with_hg, os.X_OK)):
            parser.error('--with-hg must specify an executable hg script')
        if not os.path.basename(options.with_hg) == b'hg':
            sys.stderr.write('warning: --with-hg should specify an hg script\n')
    if options.local:
        # --local resolves hg (and optionally chg) relative to the repo
        # root containing this script.
        testdir = os.path.dirname(_bytespath(canonpath(sys.argv[0])))
        reporootdir = os.path.dirname(testdir)
        pathandattrs = [(b'hg', 'with_hg')]
        if options.chg:
            pathandattrs.append((b'contrib/chg/chg', 'with_chg'))
        for relpath, attr in pathandattrs:
            binpath = os.path.join(reporootdir, relpath)
            if os.name != 'nt' and not os.access(binpath, os.X_OK):
                parser.error('--local specified, but %r not found or '
                             'not executable' % binpath)
            setattr(options, attr, binpath)

    if (options.chg or options.with_chg) and os.name == 'nt':
        parser.error('chg does not work on %s' % os.name)
    if options.with_chg:
        options.chg = False # no installation to temporary location
        options.with_chg = canonpath(_bytespath(options.with_chg))
        if not (os.path.isfile(options.with_chg) and
                os.access(options.with_chg, os.X_OK)):
            parser.error('--with-chg must specify a chg executable')
    if options.chg and options.with_hg:
        # chg shares installation location with hg
        parser.error('--chg does not work when --with-hg is specified '
                     '(use --with-chg instead)')

    global useipv6
    if options.ipv6:
        useipv6 = checksocketfamily('AF_INET6')
    else:
        # only use IPv6 if IPv4 is unavailable and IPv6 is available
        useipv6 = ((not checksocketfamily('AF_INET'))
                   and checksocketfamily('AF_INET6'))

    options.anycoverage = options.cover or options.annotate or options.htmlcov
    if options.anycoverage:
        try:
            import coverage
            covver = version.StrictVersion(coverage.__version__).version
            if covver < (3, 3):
                parser.error('coverage options require coverage 3.3 or later')
        except ImportError:
            parser.error('coverage options now require the coverage package')

    if options.anycoverage and options.local:
        # this needs some path mangling somewhere, I guess
        parser.error("sorry, coverage options do not work when --local "
                     "is specified")

    if options.anycoverage and options.with_hg:
        parser.error("sorry, coverage options do not work when --with-hg "
                     "is specified")

    global verbose
    if options.verbose:
        # verbose is '' (not True) when enabled; see vlog()/log().
        verbose = ''

    if options.tmpdir:
        options.tmpdir = canonpath(options.tmpdir)

    if options.jobs < 1:
        parser.error('--jobs must be positive')
    if options.interactive and options.debug:
        parser.error("-i/--interactive and -d/--debug are incompatible")
    if options.debug:
        if options.timeout != defaults['timeout']:
            sys.stderr.write(
                'warning: --timeout option ignored with --debug\n')
        if options.slowtimeout != defaults['slowtimeout']:
            sys.stderr.write(
                'warning: --slowtimeout option ignored with --debug\n')
        options.timeout = 0
        options.slowtimeout = 0
    if options.py3k_warnings:
        if PYTHON3:
            parser.error(
                '--py3k-warnings can only be used on Python 2.6 and 2.7')
    if options.with_python3:
        if PYTHON3:
            parser.error('--with-python3 cannot be used when executing with '
                         'Python 3')

        options.with_python3 = canonpath(options.with_python3)
        # Verify Python3 executable is acceptable.
        proc = subprocess.Popen([options.with_python3, b'--version'],
                                stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _err = proc.communicate()
        ret = proc.wait()
        if ret != 0:
            parser.error('could not determine version of python 3')
        if not out.startswith('Python '):
            parser.error('unexpected output from python3 --version: %s' %
                         out)
        vers = version.LooseVersion(out[len('Python '):])
        if vers < version.LooseVersion('3.5.0'):
            parser.error('--with-python3 version must be 3.5.0 or greater; '
                         'got %s' % out)

    if options.blacklist:
        options.blacklist = parselistfiles(options.blacklist, 'blacklist')
    if options.whitelist:
        options.whitelisted = parselistfiles(options.whitelist, 'whitelist')
    else:
        options.whitelisted = {}

    if options.showchannels:
        options.nodiff = True

    return (options, args)
460 460
def rename(src, dst):
    """Like os.rename(), trade atomicity and opened files friendliness
    for existing destination support.
    """
    # Copy then delete: not atomic, but succeeds when dst already exists.
    shutil.copy(src, dst)
    os.remove(src)
467 467
_unified_diff = difflib.unified_diff
if PYTHON3:
    import functools
    # Test output is bytes; on Python 3 unified_diff expects str, so route
    # it through difflib.diff_bytes.
    _unified_diff = functools.partial(difflib.diff_bytes, difflib.unified_diff)
472 472
def getdiff(expected, output, ref, err):
    """Return (servefail, lines): a unified diff of expected vs. output.

    File-header lines get backslashes normalized to '/', a trailing
    "space before newline" is stripped, and servefail reports whether
    the diff contains the "child process failed to start" marker.
    """
    servefail = False
    lines = []
    marker = b'+ abort: child process failed to start'
    for raw in _unified_diff(expected, output, ref, err):
        if raw.startswith((b'+++', b'---')):
            raw = raw.replace(b'\\', b'/')
        if raw.endswith(b' \n'):
            raw = raw[:-2] + b'\n'
        lines.append(raw)
        if not servefail and raw.startswith(marker):
            servefail = True
    return servefail, lines
487 487
verbose = False
def vlog(*msg):
    """Log only when in verbose mode."""
    # Compare with 'is False' rather than truthiness: parseargs() enables
    # verbose mode by setting the global to '' (which is falsy), so a plain
    # "if not verbose" would be wrong.
    if verbose is False:
        return

    return log(*msg)
495 495
# Bytes that break XML even in a CDATA block: control characters 0-31
# sans \t, \n and \r
CDATA_EVIL = re.compile(br"[\000-\010\013\014\016-\037]")

# Match feature conditionalized output lines in the form, capturing the feature
# list in group 2, and the preceeding line output in group 1:
#
#   output..output (feature !)\n
optline = re.compile(b'(.+) \((.+?) !\)\n$')

def cdatasafe(data):
    """Make a string safe to include in a CDATA block.

    Certain control characters are illegal in a CDATA block, and
    there's no way to include a ]]> in a CDATA either. This function
    replaces illegal bytes with ? and adds a space between the ]] so
    that it won't break the CDATA block.
    """
    sanitized = CDATA_EVIL.sub(b'?', data)
    return sanitized.replace(b']]>', b'] ]>')
515 515
def log(*msg):
    """Log something to stdout.

    Arguments are strings to print.
    """
    # iolock is defined elsewhere in this file — presumably it serializes
    # output from concurrent test threads; confirm before relying on it.
    with iolock:
        if verbose:
            print(verbose, end=' ')
        for m in msg:
            print(m, end=' ')
        print()
        sys.stdout.flush()
528 528
def terminate(proc):
    """Terminate subprocess (with fallback for Python versions < 2.6)"""
    vlog('# Terminating process %d' % proc.pid)
    # Old Popen objects may lack terminate(); fall back to SIGTERM.
    killer = getattr(proc, 'terminate',
                     lambda: os.kill(proc.pid, signal.SIGTERM))
    try:
        killer()
    except OSError:
        # Process may already be gone; best effort only.
        pass
536 536
def killdaemons(pidfile):
    """Kill the daemon processes whose pids are listed in pidfile."""
    # Deferred import of the sibling killdaemons helper module; delegate
    # with remove=True so the pid file is cleaned up as well.
    import killdaemons as killmod
    return killmod.killdaemons(pidfile, tryhard=False, remove=True,
                               logfn=vlog)
541 541
542 542 class Test(unittest.TestCase):
543 543 """Encapsulates a single, runnable test.
544 544
545 545 While this class conforms to the unittest.TestCase API, it differs in that
546 546 instances need to be instantiated manually. (Typically, unittest.TestCase
547 547 classes are instantiated automatically by scanning modules.)
548 548 """
549 549
550 550 # Status code reserved for skipped tests (used by hghave).
551 551 SKIPPED_STATUS = 80
552 552
    def __init__(self, path, tmpdir, keeptmpdir=False,
                 debug=False,
                 timeout=defaults['timeout'],
                 startport=defaults['port'], extraconfigopts=None,
                 py3kwarnings=False, shell=None, hgcommand=None,
                 slowtimeout=defaults['slowtimeout'], usechg=False,
                 useipv6=False):
        """Create a test from parameters.

        path is the full path to the file defining the test.

        tmpdir is the main temporary directory to use for this test.

        keeptmpdir determines whether to keep the test's temporary directory
        after execution. It defaults to removal (False).

        debug mode will make the test execute verbosely, with unfiltered
        output.

        timeout controls the maximum run time of the test. It is ignored when
        debug is True. See slowtimeout for tests with #require slow.

        slowtimeout overrides timeout if the test has #require slow.

        startport controls the starting port number to use for this test. Each
        test will reserve 3 port numbers for execution. It is the caller's
        responsibility to allocate a non-overlapping port range to Test
        instances.

        extraconfigopts is an iterable of extra hgrc config options. Values
        must have the form "key=value" (something understood by hgrc). Values
        of the form "foo.key=value" will result in "[foo] key=value".

        py3kwarnings enables Py3k warnings.

        shell is the shell to execute tests in.
        """
        self.path = path
        self.bname = os.path.basename(path)
        self.name = _strpath(self.bname)
        self._testdir = os.path.dirname(path)
        # .err file sits next to the test and receives mismatching output.
        self.errpath = os.path.join(self._testdir, b'%s.err' % self.bname)

        self._threadtmp = tmpdir
        self._keeptmpdir = keeptmpdir
        self._debug = debug
        self._timeout = timeout
        self._slowtimeout = slowtimeout
        self._startport = startport
        self._extraconfigopts = extraconfigopts or []
        self._py3kwarnings = py3kwarnings
        self._shell = _bytespath(shell)
        self._hgcommand = hgcommand or b'hg'
        self._usechg = usechg
        self._useipv6 = useipv6

        # Per-run state; reset again in setUp().
        self._aborted = False
        self._daemonpids = []
        self._finished = None
        self._ret = None
        self._out = None
        self._skipped = None
        self._testtmp = None
        self._chgsockdir = None

        # If we're not in --debug mode and reference output file exists,
        # check test output against it.
        if debug:
            self._refout = None # to match "out is None"
        elif os.path.exists(self.refpath):
            f = open(self.refpath, 'rb')
            self._refout = f.read().splitlines(True)
            f.close()
        else:
            self._refout = []
628 628
    # unittest.TestCase machinery (e.g. the base class __repr__) expects a
    # _testMethodName attribute; expose our test name under that name.
    @property
    def _testMethodName(self):
        return self.name
633 633
    def __str__(self):
        """Return the test's name."""
        return self.name
636 636
    def shortDescription(self):
        """unittest hook: describe this test by its name."""
        return self.name
639 639
    def setUp(self):
        """Tasks to perform before run()."""
        # Reset per-run state so a Test instance can be run repeatedly.
        self._finished = False
        self._ret = None
        self._out = None
        self._skipped = None

        try:
            os.mkdir(self._threadtmp)
        except OSError as e:
            # The per-thread tmp dir may already exist from a previous test
            # on the same thread; anything else is a real failure.
            if e.errno != errno.EEXIST:
                raise

        name = os.path.basename(self.path)
        self._testtmp = os.path.join(self._threadtmp, name)
        os.mkdir(self._testtmp)

        # Remove any previous output files.
        if os.path.exists(self.errpath):
            try:
                os.remove(self.errpath)
            except OSError as e:
                # We might have raced another test to clean up a .err
                # file, so ignore ENOENT when removing a previous .err
                # file.
                if e.errno != errno.ENOENT:
                    raise

        if self._usechg:
            self._chgsockdir = os.path.join(self._threadtmp,
                                            b'%s.chgsock' % name)
            os.mkdir(self._chgsockdir)
672 672
    def run(self, result):
        """Run this test and report results against a TestResult instance."""
        # This function is extremely similar to unittest.TestCase.run(). Once
        # we require Python 2.7 (or at least its version of unittest), this
        # function can largely go away.
        self._result = result
        result.startTest(self)
        try:
            try:
                self.setUp()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                return

            success = False
            try:
                self.runTest()
            except KeyboardInterrupt:
                self._aborted = True
                raise
            except SkipTest as e:
                result.addSkip(self, str(e))
                # The base class will have already counted this as a
                # test we "ran", but we want to exclude skipped tests
                # from those we count towards those run.
                result.testsRun -= 1
            except IgnoreTest as e:
                result.addIgnore(self, str(e))
                # As with skips, ignores also should be excluded from
                # the number of tests executed.
                result.testsRun -= 1
            except WarnTest as e:
                result.addWarn(self, str(e))
            except ReportedTest as e:
                # Already reported through the result object elsewhere;
                # nothing further to record here.
                pass
            except self.failureException as e:
                # This differs from unittest in that we don't capture
                # the stack trace. This is for historical reasons and
                # this decision could be revisited in the future,
                # especially for PythonTest instances.
                if result.addFailure(self, str(e)):
                    success = True
            except Exception:
                result.addError(self, sys.exc_info())
            else:
                success = True

            # tearDown failures turn an otherwise-passing test into an error.
            try:
                self.tearDown()
            except (KeyboardInterrupt, SystemExit):
                self._aborted = True
                raise
            except Exception:
                result.addError(self, sys.exc_info())
                success = False

            if success:
                result.addSuccess(self)
        finally:
            result.stopTest(self, interrupted=self._aborted)
736 736
    def runTest(self):
        """Run this test instance.

        The outcome is recorded on self (_ret, _out, _finished, _skipped)
        and reported through the unittest skip/fail machinery.
        """
        env = self._getenv()
        self._daemonpids.append(env['DAEMON_PIDS'])
        self._createhgrc(env['HGRCPATH'])

        vlog('# Test', self.name)

        ret, out = self._run(env)
        self._finished = True
        self._ret = ret
        self._out = out

        def describe(ret):
            # Negative return codes mean death by signal.
            if ret < 0:
                return 'killed by signal: %d' % -ret
            return 'returned error code %d' % ret

        self._skipped = False

        if ret == self.SKIPPED_STATUS:
            if out is None: # Debug mode, nothing to parse.
                missing = ['unknown']
                failed = None
            else:
                missing, failed = TTest.parsehghaveoutput(out)

            if not missing:
                missing = ['skipped']

            if failed:
                self.fail('hg have failed checking for %s' % failed[-1])
            else:
                self._skipped = True
                raise SkipTest(missing[-1])
        elif ret == 'timeout':
            self.fail('timed out')
        elif ret is False:
            raise WarnTest('no result code from test')
        elif out != self._refout:
            # Diff generation may rely on written .err file.
            if (ret != 0 or out != self._refout) and not self._skipped \
                and not self._debug:
                f = open(self.errpath, 'wb')
                for line in out:
                    f.write(line)
                f.close()

            # The result object handles diff calculation for us.
            if self._result.addOutputMismatch(self, ret, out, self._refout):
                # change was accepted, skip failing
                return

            if ret:
                msg = 'output changed and ' + describe(ret)
            else:
                msg = 'output changed'

            self.fail(msg)
        elif ret:
            self.fail(describe(ret))
801 801
    def tearDown(self):
        """Tasks to perform after run()."""
        # Kill any daemons the test left behind.
        for entry in self._daemonpids:
            killdaemons(entry)
        self._daemonpids = []

        if self._keeptmpdir:
            log('\nKeeping testtmp dir: %s\nKeeping threadtmp dir: %s' %
                (self._testtmp.decode('utf-8'),
                 self._threadtmp.decode('utf-8')))
        else:
            shutil.rmtree(self._testtmp, True)
            shutil.rmtree(self._threadtmp, True)

        if self._usechg:
            # chgservers will stop automatically after they find the socket
            # files are deleted
            shutil.rmtree(self._chgsockdir, True)

        # Persist the mismatching output in the .err file next to the test.
        if (self._ret != 0 or self._out != self._refout) and not self._skipped \
            and not self._debug and self._out:
            f = open(self.errpath, 'wb')
            for line in self._out:
                f.write(line)
            f.close()

        vlog("# Ret was:", self._ret, '(%s)' % self.name)
829 829
    def _run(self, env):
        """Run the test with environment env; return (exitcode, output).

        Subclasses must override; the base implementation reports the
        test as skipped with an 'unknown test type' reason.
        """
        # This should be implemented in child classes to run tests.
        raise SkipTest('unknown test type')
833 833
    def abort(self):
        """Terminate execution of this test."""
        # Only sets a flag; run() passes it to result.stopTest() as
        # 'interrupted'.
        self._aborted = True
837 837
    def _portmap(self, i):
        # Map the i-th reserved port number back to the symbolic $HGPORT,
        # $HGPORT1, $HGPORT2 form used in expected test output.
        offset = b'' if i == 0 else b'%d' % i
        return (br':%d\b' % (self._startport + i), b':$HGPORT%s' % offset)
841 841
    def _getreplacements(self):
        """Obtain a mapping of text replacements to apply to test output.

        Test output needs to be normalized so it can be compared to expected
        output. This function defines how some of that normalization will
        occur.
        """
        r = [
            # This list should be parallel to defineport in _getenv
            self._portmap(0),
            self._portmap(1),
            self._portmap(2),
            # Mask "saved backup bundle to ..." paths as (glob) lines.
            (br'(?m)^(saved backup bundle to .*\.hg)( \(glob\))?$',
             br'\1 (glob)'),
            # Replace the local IP with $LOCALIP; the leading non-digit
            # group keeps it from matching inside a longer number.
            (br'([^0-9])%s' % re.escape(self._localip()), br'\1$LOCALIP'),
            (br'\bHG_TXNID=TXN:[a-f0-9]{40}\b', br'HG_TXNID=TXN:$ID$'),
        ]
        r.append((self._escapepath(self._testtmp), b'$TESTTMP'))

        return r
862 862
863 863 def _escapepath(self, p):
864 864 if os.name == 'nt':
865 865 return (
866 866 (b''.join(c.isalpha() and b'[%s%s]' % (c.lower(), c.upper()) or
867 867 c in b'/\\' and br'[/\\]' or c.isdigit() and c or b'\\' + c
868 868 for c in p))
869 869 )
870 870 else:
871 871 return re.escape(p)
872 872
873 873 def _localip(self):
874 874 if self._useipv6:
875 875 return b'::1'
876 876 else:
877 877 return b'127.0.0.1'
878 878
879 879 def _getenv(self):
880 880 """Obtain environment variables to use during test execution."""
881 881 def defineport(i):
882 882 offset = '' if i == 0 else '%s' % i
883 883 env["HGPORT%s" % offset] = '%s' % (self._startport + i)
884 884 env = os.environ.copy()
885 885 if sysconfig is not None:
886 886 env['PYTHONUSERBASE'] = sysconfig.get_config_var('userbase')
887 env['HGEMITWARNINGS'] = '1'
887 888 env['TESTTMP'] = self._testtmp
888 889 env['HOME'] = self._testtmp
889 890 # This number should match portneeded in _getport
890 891 for port in xrange(3):
891 892 # This list should be parallel to _portmap in _getreplacements
892 893 defineport(port)
893 894 env["HGRCPATH"] = os.path.join(self._threadtmp, b'.hgrc')
894 895 env["DAEMON_PIDS"] = os.path.join(self._threadtmp, b'daemon.pids')
895 896 env["HGEDITOR"] = ('"' + sys.executable + '"'
896 897 + ' -c "import sys; sys.exit(0)"')
897 898 env["HGMERGE"] = "internal:merge"
898 899 env["HGUSER"] = "test"
899 900 env["HGENCODING"] = "ascii"
900 901 env["HGENCODINGMODE"] = "strict"
901 902 env['HGIPV6'] = str(int(self._useipv6))
902 903
903 904 # LOCALIP could be ::1 or 127.0.0.1. Useful for tests that require raw
904 905 # IP addresses.
905 906 env['LOCALIP'] = self._localip()
906 907
907 908 # Reset some environment variables to well-known values so that
908 909 # the tests produce repeatable output.
909 910 env['LANG'] = env['LC_ALL'] = env['LANGUAGE'] = 'C'
910 911 env['TZ'] = 'GMT'
911 912 env["EMAIL"] = "Foo Bar <foo.bar@example.com>"
912 913 env['COLUMNS'] = '80'
913 914 env['TERM'] = 'xterm'
914 915
915 916 for k in ('HG HGPROF CDPATH GREP_OPTIONS http_proxy no_proxy ' +
916 917 'HGPLAIN HGPLAINEXCEPT EDITOR VISUAL PAGER ' +
917 918 'NO_PROXY CHGDEBUG').split():
918 919 if k in env:
919 920 del env[k]
920 921
921 922 # unset env related to hooks
922 923 for k in env.keys():
923 924 if k.startswith('HG_'):
924 925 del env[k]
925 926
926 927 if self._usechg:
927 928 env['CHGSOCKNAME'] = os.path.join(self._chgsockdir, b'server')
928 929
929 930 return env
930 931
931 932 def _createhgrc(self, path):
932 933 """Create an hgrc file for this test."""
933 934 hgrc = open(path, 'wb')
934 935 hgrc.write(b'[ui]\n')
935 936 hgrc.write(b'slash = True\n')
936 937 hgrc.write(b'interactive = False\n')
937 938 hgrc.write(b'mergemarkers = detailed\n')
938 939 hgrc.write(b'promptecho = True\n')
939 940 hgrc.write(b'[defaults]\n')
940 941 hgrc.write(b'backout = -d "0 0"\n')
941 942 hgrc.write(b'commit = -d "0 0"\n')
942 943 hgrc.write(b'shelve = --date "0 0"\n')
943 944 hgrc.write(b'tag = -d "0 0"\n')
944 945 hgrc.write(b'[devel]\n')
945 946 hgrc.write(b'all-warnings = true\n')
946 947 hgrc.write(b'[largefiles]\n')
947 948 hgrc.write(b'usercache = %s\n' %
948 949 (os.path.join(self._testtmp, b'.cache/largefiles')))
949 950 hgrc.write(b'[web]\n')
950 951 hgrc.write(b'address = localhost\n')
951 952 hgrc.write(b'ipv6 = %s\n' % str(self._useipv6).encode('ascii'))
952 953
953 954 for opt in self._extraconfigopts:
954 955 section, key = opt.split('.', 1)
955 956 assert '=' in key, ('extra config opt %s must '
956 957 'have an = for assignment' % opt)
957 958 hgrc.write(b'[%s]\n%s\n' % (section, key))
958 959 hgrc.close()
959 960
    def fail(self, msg):
        """Record this test as failed (not errored) with reason ``msg``.

        unittest differentiates between errored and failed.
        Failed is denoted by AssertionError (by default at least).
        """
        raise AssertionError(msg)
964 965
    def _runcommand(self, cmd, env, normalizenewlines=False):
        """Run command in a sub-process, capturing the output (stdout and
        stderr).

        Return a tuple (exitcode, output). output is None in debug mode.
        When normalizenewlines is True, CRLF in the captured output is
        folded to LF before splitting into lines.
        """
        if self._debug:
            # Debug mode: run without capturing so output goes straight to
            # the user's terminal.
            proc = subprocess.Popen(cmd, shell=True, cwd=self._testtmp,
                                    env=env)
            ret = proc.wait()
            return (ret, None)

        proc = Popen4(cmd, self._testtmp, self._timeout, env)
        def cleanup():
            # Kill the test process and any daemons it spawned; fake a
            # SIGTERM exit status if it had already exited cleanly.
            terminate(proc)
            ret = proc.wait()
            if ret == 0:
                ret = signal.SIGTERM << 8
            killdaemons(env['DAEMON_PIDS'])
            return ret

        output = ''
        proc.tochild.close()

        try:
            output = proc.fromchild.read()
        except KeyboardInterrupt:
            vlog('# Handling keyboard interrupt')
            cleanup()
            raise

        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)

        if proc.timeout:
            # Popen4 sets this flag when the command hit self._timeout.
            ret = 'timeout'

        if ret:
            killdaemons(env['DAEMON_PIDS'])

        # Normalize the output (ports, paths, ...) so it can be compared
        # against the expected output.
        for s, r in self._getreplacements():
            output = re.sub(s, r, output)

        if normalizenewlines:
            output = output.replace('\r\n', '\n')

        return ret, output.splitlines(True)
1013 1014
class PythonTest(Test):
    """A Python-based test."""

    @property
    def refpath(self):
        # Expected output lives next to the test as <testname>.out.
        return os.path.join(self._testdir, b'%s.out' % self.bname)

    def _run(self, env):
        """Execute the .py test with the configured interpreter."""
        if self._py3kwarnings:
            py3kswitch = b' -3'
        else:
            py3kswitch = b''
        cmd = b'%s%s "%s"' % (PYTHON, py3kswitch, self.path)
        vlog("# Running", cmd)
        # Windows produces CRLF output; fold it to LF for comparison.
        normalizenewlines = os.name == 'nt'
        result = self._runcommand(cmd, env,
                                  normalizenewlines=normalizenewlines)
        if self._aborted:
            raise KeyboardInterrupt()

        return result
1032 1033
# Some glob patterns apply only in some circumstances, so the script
# might want to remove (glob) annotations that otherwise should be
# retained.
checkcodeglobpats = [
    # On Windows it looks like \ doesn't require a (glob), but we know
    # better.
    re.compile(br'^pushing to \$TESTTMP/.*[^)]$'),
    re.compile(br'^moving \S+/.*[^)]$'),
    re.compile(br'^pulling from \$TESTTMP/.*[^)]$'),
    # Not all platforms have 127.0.0.1 as loopback (though most do),
    # so we always glob that too.
    re.compile(br'.*\$LOCALIP.*$'),
]

# bchr(i) maps a small integer to the corresponding one-byte bytes object.
# On Python 3 indexing/iterating bytes yields ints, so chr() alone is not
# enough there.
bchr = chr
if PYTHON3:
    bchr = lambda x: bytes([x])
1050 1051
class TTest(Test):
    """A "t test" is a test backed by a .t file."""

    # Prefixes hghave emits for skipped / failed feature checks.
    SKIPPED_PREFIX = b'skipped: '
    FAILED_PREFIX = b'hghave check failed: '
    # Matcher for bytes that need (esc)-encoding in recorded output.
    NEEDESCAPE = re.compile(br'[\x00-\x08\x0b-\x1f\x7f-\xff]').search

    ESCAPESUB = re.compile(br'[\x00-\x08\x0b-\x1f\\\x7f-\xff]').sub
    ESCAPEMAP = dict((bchr(i), br'\x%02x' % i) for i in range(256))
    ESCAPEMAP.update({b'\\': b'\\\\', b'\r': br'\r'})

    @property
    def refpath(self):
        # For .t tests the reference output is the test file itself.
        return os.path.join(self._testdir, self.bname)

    def _run(self, env):
        """Translate the .t file into a shell script, run it, and merge
        its output back with the expectations."""
        f = open(self.path, 'rb')
        lines = f.readlines()
        f.close()

        salt, script, after, expected = self._parsetest(lines)

        # Write out the generated script.
        fname = b'%s.sh' % self._testtmp
        f = open(fname, 'wb')
        for l in script:
            f.write(l)
        f.close()

        cmd = b'%s "%s"' % (self._shell, fname)
        vlog("# Running", cmd)

        exitcode, output = self._runcommand(cmd, env)

        if self._aborted:
            raise KeyboardInterrupt()

        # Do not merge output if skipped. Return hghave message instead.
        # Similarly, with --debug, output is None.
        if exitcode == self.SKIPPED_STATUS or output is None:
            return exitcode, output

        return self._processoutput(exitcode, output, salt, after, expected)

    def _hghave(self, reqs):
        """Run the hghave script for ``reqs``; return (available, message).

        Exit status 2 from hghave is fatal and aborts the runner. A 'slow'
        requirement bumps this test's timeout to the slow timeout.
        """
        # TODO do something smarter when all other uses of hghave are gone.
        runtestdir = os.path.abspath(os.path.dirname(_bytespath(__file__)))
        tdir = runtestdir.replace(b'\\', b'/')
        proc = Popen4(b'%s -c "%s/hghave %s"' %
                      (self._shell, tdir, b' '.join(reqs)),
                      self._testtmp, 0, self._getenv())
        stdout, stderr = proc.communicate()
        ret = proc.wait()
        if wifexited(ret):
            ret = os.WEXITSTATUS(ret)
        if ret == 2:
            print(stdout.decode('utf-8'))
            sys.exit(1)

        if ret != 0:
            return False, stdout

        if 'slow' in reqs:
            self._timeout = self._slowtimeout
        return True, None

    def _parsetest(self, lines):
        """Compile .t ``lines`` into (salt, script, after, expected).

        script is the generated shell script; after maps source line
        numbers to non-executed text to splice back in; expected maps a
        command's line number to its expected output lines.
        """
        # We generate a shell script which outputs unique markers to line
        # up script results with our source. These markers include input
        # line number and the last return code.
        salt = b"SALT%d" % time.time()
        def addsalt(line, inpython):
            if inpython:
                script.append(b'%s %d 0\n' % (salt, line))
            else:
                script.append(b'echo %s %d $?\n' % (salt, line))

        script = []

        # After we run the shell script, we re-unify the script output
        # with non-active parts of the source, with synchronization by our
        # SALT line number markers. The after table contains the non-active
        # components, ordered by line number.
        after = {}

        # Expected shell script output.
        expected = {}

        pos = prepos = -1

        # True or False when in a true or false conditional section
        skipping = None

        # We keep track of whether or not we're in a Python block so we
        # can generate the surrounding doctest magic.
        inpython = False

        if self._debug:
            script.append(b'set -x\n')
        if self._hgcommand != b'hg':
            script.append(b'alias hg="%s"\n' % self._hgcommand)
        if os.getenv('MSYSTEM'):
            script.append(b'alias pwd="pwd -W"\n')

        n = 0
        for n, l in enumerate(lines):
            if not l.endswith(b'\n'):
                l += b'\n'
            if l.startswith(b'#require'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#require':
                    after.setdefault(pos, []).append('  !!! invalid #require\n')
                haveresult, message = self._hghave(lsplit[1:])
                if not haveresult:
                    # Unmet requirement: replace the whole script with a
                    # skip (exit 80) reporting the hghave message.
                    script = [b'echo "%s"\nexit 80\n' % message]
                    break
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#if'):
                lsplit = l.split()
                if len(lsplit) < 2 or lsplit[0] != b'#if':
                    after.setdefault(pos, []).append('  !!! invalid #if\n')
                if skipping is not None:
                    after.setdefault(pos, []).append('  !!! nested #if\n')
                skipping = not self._hghave(lsplit[1:])[0]
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#else'):
                if skipping is None:
                    after.setdefault(pos, []).append('  !!! missing #if\n')
                skipping = not skipping
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'#endif'):
                if skipping is None:
                    after.setdefault(pos, []).append('  !!! missing #if\n')
                skipping = None
                after.setdefault(pos, []).append(l)
            elif skipping:
                after.setdefault(pos, []).append(l)
            elif l.startswith(b'  >>> '): # python inlines
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                if not inpython:
                    # We've just entered a Python block. Add the header.
                    inpython = True
                    addsalt(prepos, False) # Make sure we report the exit code.
                    script.append(b'%s -m heredoctest <<EOF\n' % PYTHON)
                addsalt(n, True)
                script.append(l[2:])
            elif l.startswith(b'  ... '): # python inlines
                after.setdefault(prepos, []).append(l)
                script.append(l[2:])
            elif l.startswith(b'  $ '): # commands
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                after.setdefault(pos, []).append(l)
                prepos = pos
                pos = n
                addsalt(n, False)
                cmd = l[4:].split()
                if len(cmd) == 2 and cmd[0] == b'cd':
                    # Make a failing cd abort the script immediately.
                    l = b'  $ cd %s || exit 1\n' % cmd[1]
                script.append(l[4:])
            elif l.startswith(b'  > '): # continuations
                after.setdefault(prepos, []).append(l)
                script.append(l[4:])
            elif l.startswith(b'  '): # results
                # Queue up a list of expected results.
                expected.setdefault(pos, []).append(l[2:])
            else:
                if inpython:
                    script.append(b'EOF\n')
                    inpython = False
                # Non-command/result. Queue up for merged output.
                after.setdefault(pos, []).append(l)

        if inpython:
            script.append(b'EOF\n')
        if skipping is not None:
            after.setdefault(pos, []).append('  !!! missing #endif\n')
        addsalt(n + 1, False)

        return salt, script, after, expected

    def _processoutput(self, exitcode, output, salt, after, expected):
        """Merge the script output back into a unified test.

        Returns (exitcode, postout); exitcode becomes False when the run
        only produced warn-level mismatches.
        """
        warnonly = 1 # 1: not yet; 2: yes; 3: for sure not
        if exitcode != 0:
            warnonly = 3

        pos = -1
        postout = []
        for l in output:
            lout, lcmd = l, None
            if salt in l:
                # Salt marker: everything after it is '<line> <retcode>'.
                lout, lcmd = l.split(salt, 1)

            while lout:
                if not lout.endswith(b'\n'):
                    lout += b' (no-eol)\n'

                # Find the expected output at the current position.
                els = [None]
                if expected.get(pos, None):
                    els = expected[pos]

                i = 0
                optional = []
                while i < len(els):
                    el = els[i]

                    r = TTest.linematch(el, lout)
                    if isinstance(r, str):
                        if r == '+glob':
                            lout = el[:-1] + ' (glob)\n'
                            r = '' # Warn only this line.
                        elif r == '-glob':
                            lout = ''.join(el.rsplit(' (glob)', 1))
                            r = '' # Warn only this line.
                        elif r == "retry":
                            postout.append(b'  ' + el)
                            els.pop(i)
                            break
                        else:
                            log('\ninfo, unknown linematch result: %r\n' % r)
                            r = False
                    if r:
                        els.pop(i)
                        break
                    if el:
                        if el.endswith(b" (?)\n"):
                            optional.append(i)
                        else:
                            m = optline.match(el)
                            if m:
                                conditions = [c for c in m.group(2).split(' ')]

                                if self._hghave(conditions)[0]:
                                    lout = el
                                else:
                                    optional.append(i)

                    i += 1

                if r:
                    if r == "retry":
                        continue
                    # clean up any optional leftovers
                    for i in optional:
                        postout.append(b'  ' + els[i])
                    for i in reversed(optional):
                        del els[i]
                    postout.append(b'  ' + el)
                else:
                    if self.NEEDESCAPE(lout):
                        lout = TTest._stringescape(b'%s (esc)\n' %
                                                   lout.rstrip(b'\n'))
                    postout.append(b'  ' + lout) # Let diff deal with it.
                    if r != '': # If line failed.
                        warnonly = 3 # for sure not
                    elif warnonly == 1: # Is "not yet" and line is warn only.
                        warnonly = 2 # Yes do warn.
                break
            else:
                # while-else: only reached when lout was empty to begin
                # with; clean up any optional leftovers
                while expected.get(pos, None):
                    el = expected[pos].pop(0)
                    if el:
                        if (not optline.match(el)
                            and not el.endswith(b" (?)\n")):
                            break
                    postout.append(b'  ' + el)

            if lcmd:
                # Add on last return code.
                ret = int(lcmd.split()[1])
                if ret != 0:
                    postout.append(b'  [%d]\n' % ret)
                if pos in after:
                    # Merge in non-active test bits.
                    postout += after.pop(pos)
                pos = int(lcmd.split()[0])

        if pos in after:
            postout += after.pop(pos)

        if warnonly == 2:
            exitcode = False # Set exitcode to warned.

        return exitcode, postout

    @staticmethod
    def rematch(el, l):
        """Match regex expectation ``el`` against output line ``l``."""
        try:
            # use \Z to ensure that the regex matches to the end of the string
            if os.name == 'nt':
                return re.match(el + br'\r?\n\Z', l)
            return re.match(el + br'\n\Z', l)
        except re.error:
            # el is an invalid regex
            return False

    @staticmethod
    def globmatch(el, l):
        """Match glob expectation ``el`` against output line ``l``."""
        # The only supported special characters are * and ? plus / which also
        # matches \ on windows. Escaping of these characters is supported.
        if el + b'\n' == l:
            if os.altsep:
                # matching on "/" is not needed for this line
                for pat in checkcodeglobpats:
                    if pat.match(el):
                        return True
                return b'-glob'
            return True
        el = el.replace(b'$LOCALIP', b'*')
        i, n = 0, len(el)
        res = b''
        while i < n:
            c = el[i:i + 1]
            i += 1
            if c == b'\\' and i < n and el[i:i + 1] in b'*?\\/':
                # Escaped glob metacharacter: keep literally.
                res += el[i - 1:i + 1]
                i += 1
            elif c == b'*':
                res += b'.*'
            elif c == b'?':
                res += b'.'
            elif c == b'/' and os.altsep:
                res += b'[/\\\\]'
            else:
                res += re.escape(c)
        return TTest.rematch(res, l)

    @staticmethod
    def linematch(el, l):
        """Compare expected line ``el`` with output line ``l``.

        Returns True on a match, a falsy value on mismatch, or one of the
        marker strings ('retry', '+glob', '-glob') handled by
        _processoutput.
        """
        retry = False
        if el == l: # perfect match (fast)
            return True
        if el:
            if el.endswith(b" (?)\n"):
                retry = "retry"
                el = el[:-5] + b"\n"
            else:
                m = optline.match(el)
                if m:
                    el = m.group(1) + b"\n"
                    retry = "retry"

            if el.endswith(b" (esc)\n"):
                if PYTHON3:
                    el = el[:-7].decode('unicode_escape') + '\n'
                    el = el.encode('utf-8')
                else:
                    el = el[:-7].decode('string-escape') + '\n'
            if el == l or os.name == 'nt' and el[:-1] + b'\r\n' == l:
                return True
            if el.endswith(b" (re)\n"):
                return TTest.rematch(el[:-6], l) or retry
            if el.endswith(b" (glob)\n"):
                # ignore '(glob)' added to l by 'replacements'
                if l.endswith(b" (glob)\n"):
                    l = l[:-8] + b"\n"
                return TTest.globmatch(el[:-8], l) or retry
            if os.altsep and l.replace(b'\\', b'/') == el:
                return b'+glob'
        return retry

    @staticmethod
    def parsehghaveoutput(lines):
        '''Parse hghave log lines.

        Return tuple of lists (missing, failed):
        * the missing/unknown features
        * the features for which existence check failed'''
        missing = []
        failed = []
        for line in lines:
            if line.startswith(TTest.SKIPPED_PREFIX):
                line = line.splitlines()[0]
                missing.append(line[len(TTest.SKIPPED_PREFIX):].decode('utf-8'))
            elif line.startswith(TTest.FAILED_PREFIX):
                line = line.splitlines()[0]
                failed.append(line[len(TTest.FAILED_PREFIX):].decode('utf-8'))

        return missing, failed

    @staticmethod
    def _escapef(m):
        # re.sub callback: replace one unprintable byte via ESCAPEMAP.
        return TTest.ESCAPEMAP[m.group(0)]

    @staticmethod
    def _stringescape(s):
        # Escape unprintable bytes in ``s`` so output stays ASCII-safe.
        return TTest.ESCAPESUB(TTest._escapef, s)
1444 1445
# Re-entrant lock serializing console/stream output across the worker
# threads spawned by TestSuite.run.
iolock = threading.RLock()
1446 1447
class SkipTest(Exception):
    """Raised to indicate that a test is to be skipped.

    E.g. Test._run raises this for unknown test types.
    """
1449 1450
class IgnoreTest(Exception):
    """Raised to indicate that a test is to be ignored.

    Ignored is distinct from skipped; see TestResult.addIgnore vs addSkip.
    """
1452 1453
class WarnTest(Exception):
    """Raised to indicate that a test warned.

    Warned results are collected via TestResult.addWarn.
    """
1455 1456
class ReportedTest(Exception):
    """Raised to indicate that a test already reported.

    TestResult.addOutputMismatch raises this after it has itself recorded
    a server-start failure.
    """
1458 1459
class TestResult(unittest._TextTestResult):
    """Holds results when executing via unittest."""
    # Don't worry too much about accessing the non-public _TextTestResult.
    # It is relatively common in Python testing tools.
    def __init__(self, options, *args, **kwargs):
        super(TestResult, self).__init__(*args, **kwargs)

        self._options = options

        # unittest.TestResult didn't have skipped until 2.7. We need to
        # polyfill it.
        self.skipped = []

        # We have a custom "ignored" result that isn't present in any Python
        # unittest implementation. It is very similar to skipped. It may make
        # sense to map it into skip some day.
        self.ignored = []

        # We have a custom "warned" result that isn't present in any Python
        # unittest implementation. It is very similar to failed. It may make
        # sense to map it into fail some day.
        self.warned = []

        # Per-test timing tuples; see stopTest for the layout.
        self.times = []
        self._firststarttime = None
        # Data stored for the benefit of generating xunit reports.
        self.successes = []
        self.faildata = {}

    def addFailure(self, test, reason):
        """Record a failed test; emit 't' for timeouts, '!' otherwise."""
        self.failures.append((test, reason))

        if self._options.first:
            self.stop()
        else:
            with iolock:
                if reason == "timed out":
                    self.stream.write('t')
                else:
                    if not self._options.nodiff:
                        self.stream.write('\nERROR: %s output changed\n' % test)
                    self.stream.write('!')

                self.stream.flush()

    def addSuccess(self, test):
        with iolock:
            super(TestResult, self).addSuccess(test)
        self.successes.append(test)

    def addError(self, test, err):
        super(TestResult, self).addError(test, err)
        if self._options.first:
            self.stop()

    # Polyfill.
    def addSkip(self, test, reason):
        self.skipped.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('skipped %s' % reason)
            else:
                self.stream.write('s')
            self.stream.flush()

    def addIgnore(self, test, reason):
        self.ignored.append((test, reason))
        with iolock:
            if self.showAll:
                self.stream.writeln('ignored %s' % reason)
            else:
                if reason not in ('not retesting', "doesn't match keyword"):
                    self.stream.write('i')
                else:
                    # Common, uninteresting reasons: count the test but
                    # stay quiet.
                    self.testsRun += 1
                self.stream.flush()

    def addWarn(self, test, reason):
        """Record a warned test; emits '~' in non-verbose mode."""
        self.warned.append((test, reason))

        if self._options.first:
            self.stop()

        with iolock:
            if self.showAll:
                self.stream.writeln('warned %s' % reason)
            else:
                self.stream.write('~')
            self.stream.flush()

    def addOutputMismatch(self, test, ret, got, expected):
        """Record a mismatch in test output for a particular test."""
        if self.shouldStop:
            # don't print, some other test case already failed and
            # printed, we're just stale and probably failed due to our
            # temp dir getting cleaned up.
            return

        accepted = False
        lines = []

        with iolock:
            if self._options.nodiff:
                pass
            elif self._options.view:
                # Show the diff in an external viewer program.
                v = self._options.view
                if PYTHON3:
                    v = _bytespath(v)
                os.system(b"%s %s %s" %
                          (v, test.refpath, test.errpath))
            else:
                servefail, lines = getdiff(expected, got,
                                           test.refpath, test.errpath)
                if servefail:
                    self.addFailure(
                        test,
                        'server failed to start (HGPORT=%s)' % test._startport)
                    raise ReportedTest('server failed to start')
                else:
                    self.stream.write('\n')
                    for line in lines:
                        if PYTHON3:
                            self.stream.flush()
                            self.stream.buffer.write(line)
                            self.stream.buffer.flush()
                        else:
                            self.stream.write(line)
                    self.stream.flush()

            # handle interactive prompt without releasing iolock
            if self._options.interactive:
                self.stream.write('Accept this change? [n] ')
                answer = sys.stdin.readline().strip()
                if answer.lower() in ('y', 'yes'):
                    if test.name.endswith('.t'):
                        rename(test.errpath, test.path)
                    else:
                        rename(test.errpath, '%s.out' % test.path)
                    accepted = True
            if not accepted:
                self.faildata[test.name] = b''.join(lines)

        return accepted

    def startTest(self, test):
        super(TestResult, self).startTest(test)

        # os.times module computes the user time and system time spent by
        # child's processes along with real elapsed time taken by a process.
        # This module has one limitation. It can only work for Linux user
        # and not for Windows.
        test.started = os.times()
        if self._firststarttime is None: # thread racy but irrelevant
            self._firststarttime = test.started[4]

    def stopTest(self, test, interrupted=False):
        super(TestResult, self).stopTest(test)

        test.stopped = os.times()

        starttime = test.started
        endtime = test.stopped
        origin = self._firststarttime
        self.times.append((test.name,
                           endtime[2] - starttime[2], # user space CPU time
                           endtime[3] - starttime[3], # sys space CPU time
                           endtime[4] - starttime[4], # real time
                           starttime[4] - origin, # start date in run context
                           endtime[4] - origin, # end date in run context
                           ))

        if interrupted:
            with iolock:
                self.stream.writeln('INTERRUPTED: %s (after %d seconds)' % (
                    test.name, self.times[-1][3]))
1634 1635
class TestSuite(unittest.TestSuite):
    """Custom unittest TestSuite that knows how to execute Mercurial tests."""

    def __init__(self, testdir, jobs=1, whitelist=None, blacklist=None,
                 retest=False, keywords=None, loop=False, runs_per_test=1,
                 loadtest=None, showchannels=False,
                 *args, **kwargs):
        """Create a new instance that can run tests with a configuration.

        testdir specifies the directory where tests are executed from. This
        is typically the ``tests`` directory from Mercurial's source
        repository.

        jobs specifies the number of jobs to run concurrently. Each test
        executes on its own thread. Tests actually spawn new processes, so
        state mutation should not be an issue.

        If there is only one job, it will use the main thread.

        whitelist and blacklist denote tests that have been whitelisted and
        blacklisted, respectively. These arguments don't belong in TestSuite.
        Instead, whitelist and blacklist should be handled by the thing that
        populates the TestSuite with tests. They are present to preserve
        backwards compatible behavior which reports skipped tests as part
        of the results.

        retest denotes whether to retest failed tests. This arguably belongs
        outside of TestSuite.

        keywords denotes key words that will be used to filter which tests
        to execute. This arguably belongs outside of TestSuite.

        loop denotes whether to loop over tests forever.
        """
        super(TestSuite, self).__init__(*args, **kwargs)

        self._jobs = jobs
        self._whitelist = whitelist
        self._blacklist = blacklist
        self._retest = retest
        self._keywords = keywords
        self._loop = loop
        self._runs_per_test = runs_per_test
        self._loadtest = loadtest
        self._showchannels = showchannels

    def run(self, result):
        # We have a number of filters that need to be applied. We do this
        # here instead of inside Test because it makes the running logic for
        # Test simpler.
        tests = []
        num_tests = [0]
        for test in self._tests:
            # get() is called within this same loop iteration, so the
            # late-binding closure over ``test`` is safe here.
            def get():
                num_tests[0] += 1
                if getattr(test, 'should_reload', False):
                    return self._loadtest(test.path, num_tests[0])
                return test
            if not os.path.exists(test.path):
                result.addSkip(test, "Doesn't exist")
                continue

            if not (self._whitelist and test.name in self._whitelist):
                if self._blacklist and test.bname in self._blacklist:
                    result.addSkip(test, 'blacklisted')
                    continue

                if self._retest and not os.path.exists(test.errpath):
                    result.addIgnore(test, 'not retesting')
                    continue

                if self._keywords:
                    f = open(test.path, 'rb')
                    t = f.read().lower() + test.bname.lower()
                    f.close()
                    ignored = False
                    for k in self._keywords.lower().split():
                        if k not in t:
                            result.addIgnore(test, "doesn't match keyword")
                            ignored = True
                            break

                    if ignored:
                        continue
            for _ in xrange(self._runs_per_test):
                tests.append(get())

        runtests = list(tests)
        done = queue.Queue()
        running = 0

        # One display "channel" per job for the --showchannels status line.
        channels = [""] * self._jobs

        def job(test, result):
            # Claim the first free channel, run the test, then release it.
            for n, v in enumerate(channels):
                if not v:
                    channel = n
                    break
            channels[channel] = "=" + test.name[5:].split(".")[0]
            try:
                test(result)
                done.put(None)
            except KeyboardInterrupt:
                pass
            except: # re-raises
                done.put(('!', test, 'run-test raised an error, see traceback'))
                raise
            try:
                channels[channel] = ''
            except IndexError:
                pass

        def stat():
            # Background thread printing channel activity while tests run.
            count = 0
            while channels:
                d = '\n%03s ' % count
                for n, v in enumerate(channels):
                    if v:
                        d += v[0]
                        channels[n] = v[1:] or '.'
                    else:
                        d += ' '
                    d += ' '
                with iolock:
                    sys.stdout.write(d + ' ')
                    sys.stdout.flush()
                for x in xrange(10):
                    if channels:
                        time.sleep(.1)
                count += 1

        stoppedearly = False

        if self._showchannels:
            statthread = threading.Thread(target=stat, name="stat")
            statthread.start()

        try:
            while tests or running:
                if not done.empty() or running == self._jobs or not tests:
                    try:
                        done.get(True, 1)
                        running -= 1
                        if result and result.shouldStop:
                            stoppedearly = True
                            break
                    except queue.Empty:
                        continue
                if tests and not running == self._jobs:
                    test = tests.pop(0)
                    if self._loop:
                        if getattr(test, 'should_reload', False):
                            num_tests[0] += 1
                            tests.append(
                                self._loadtest(test.name, num_tests[0]))
                        else:
                            tests.append(test)
                    if self._jobs == 1:
                        job(test, result)
                    else:
                        t = threading.Thread(target=job, name=test.name,
                                             args=(test, result))
                        t.start()
                    running += 1

            # If we stop early we still need to wait on started tests to
            # finish. Otherwise, there is a race between the test completing
            # and the test's cleanup code running. This could result in the
            # test reporting incorrect.
            if stoppedearly:
                while running:
                    try:
                        done.get(True, 1)
                        running -= 1
                    except queue.Empty:
                        continue
        except KeyboardInterrupt:
            for test in runtests:
                test.abort()

        # Emptying channels makes the stat thread's while loop terminate.
        channels = []

        return result
1818 1819
1819 1820 # Save the most recent 5 wall-clock runtimes of each test to a
1820 1821 # human-readable text file named .testtimes. Tests are sorted
1821 1822 # alphabetically, while times for each test are listed from oldest to
1822 1823 # newest.
1823 1824
def loadtimes(testdir):
    """Return previously saved per-test timings from *testdir*.

    Reads the b'.testtimes' file written by savetimes() and returns a list
    of (testname, [duration, ...]) pairs, oldest duration first. Returns an
    empty list when no timing file exists yet.
    """
    times = []
    try:
        # Bug fix: this used to open b'.testtimes-', but savetimes() writes
        # b'.testtimes', so historical timings were silently never loaded.
        with open(os.path.join(testdir, b'.testtimes')) as fp:
            for line in fp:
                ts = line.split()
                times.append((ts[0], [float(t) for t in ts[1:]]))
    except IOError as err:
        # A missing file just means "no history yet"; anything else is real.
        if err.errno != errno.ENOENT:
            raise
    return times
1835 1836
def savetimes(testdir, result):
    """Merge this run's wall-clock timings into testdir's .testtimes file.

    Keeps at most the five most recent durations per test, skips tests that
    were skipped this run, and rewrites the file via a temp file + rename.
    """
    timings = dict(loadtimes(testdir))
    keeplast = 5
    ignore = {str(entry[0]) for entry in result.skipped}
    for tdata in result.times:
        name, wall = tdata[0], tdata[3]
        if name in ignore:
            continue
        history = timings.setdefault(name, [])
        history.append(wall)
        # Trim in place so the dict keeps pointing at the same list.
        history[:] = history[-keeplast:]

    fd, tmpname = tempfile.mkstemp(prefix=b'.testtimes',
                                   dir=testdir, text=True)
    with os.fdopen(fd, 'w') as fp:
        for name in sorted(timings):
            cols = ' '.join('%.3f' % (w,) for w in timings[name])
            fp.write('%s %s\n' % (name, cols))
    timepath = os.path.join(testdir, b'.testtimes')
    # Drop any stale file first, then move the freshly written one in;
    # both steps are best-effort (timings are not worth failing a run).
    try:
        os.unlink(timepath)
    except OSError:
        pass
    try:
        os.rename(tmpname, timepath)
    except OSError:
        pass
1861 1862
class TextTestRunner(unittest.TextTestRunner):
    """Custom unittest test runner that uses appropriate settings."""

    def __init__(self, runner, *args, **kwargs):
        super(TextTestRunner, self).__init__(*args, **kwargs)

        # The owning TestRunner; supplies options, the test directory and
        # the post-run _checkhglib() hook.
        self._runner = runner

    def run(self, test):
        """Run *test*, print summaries/reports, and return the TestResult.

        Besides the textual summary this can emit an xUnit XML report and a
        JSON report, persist per-test timings, and (with --known-good-rev)
        bisect each failure to the changeset that broke it.
        """
        result = TestResult(self._runner.options, self.stream,
                            self.descriptions, self.verbosity)

        test(result)

        failed = len(result.failures)
        warned = len(result.warned)
        skipped = len(result.skipped)
        ignored = len(result.ignored)

        with iolock:
            self.stream.writeln('')

            if not self._runner.options.noskips:
                for test, msg in result.skipped:
                    self.stream.writeln('Skipped %s: %s' % (test.name, msg))
            for test, msg in result.warned:
                self.stream.writeln('Warned %s: %s' % (test.name, msg))
            for test, msg in result.failures:
                self.stream.writeln('Failed %s: %s' % (test.name, msg))
            for test, msg in result.errors:
                self.stream.writeln('Errored %s: %s' % (test.name, msg))

            if self._runner.options.xunit:
                # Emit a JUnit-style XML report for CI consumption.
                with open(self._runner.options.xunit, 'wb') as xuf:
                    timesd = dict((t[0], t[3]) for t in result.times)
                    doc = minidom.Document()
                    s = doc.createElement('testsuite')
                    s.setAttribute('name', 'run-tests')
                    s.setAttribute('tests', str(result.testsRun))
                    s.setAttribute('errors', "0") # TODO
                    s.setAttribute('failures', str(failed))
                    s.setAttribute('skipped', str(skipped + ignored))
                    doc.appendChild(s)
                    for tc in result.successes:
                        t = doc.createElement('testcase')
                        t.setAttribute('name', tc.name)
                        t.setAttribute('time', '%.3f' % timesd[tc.name])
                        s.appendChild(t)
                    for tc, err in sorted(result.faildata.items()):
                        t = doc.createElement('testcase')
                        t.setAttribute('name', tc)
                        t.setAttribute('time', '%.3f' % timesd[tc])
                        # createCDATASection expects a unicode or it will
                        # convert using default conversion rules, which will
                        # fail if string isn't ASCII.
                        err = cdatasafe(err).decode('utf-8', 'replace')
                        cd = doc.createCDATASection(err)
                        t.appendChild(cd)
                        s.appendChild(t)
                    xuf.write(doc.toprettyxml(indent='  ', encoding='utf-8'))

            if self._runner.options.json:
                # Emit a machine-readable report.json next to the tests.
                jsonpath = os.path.join(self._runner._testdir, b'report.json')
                with open(jsonpath, 'w') as fp:
                    timesd = {}
                    for tdata in result.times:
                        test = tdata[0]
                        timesd[test] = tdata[1:]

                    outcome = {}
                    groups = [('success', ((tc, None)
                                           for tc in result.successes)),
                              ('failure', result.failures),
                              ('skip', result.skipped)]
                    for res, testcases in groups:
                        for tc, __ in testcases:
                            if tc.name in timesd:
                                diff = result.faildata.get(tc.name, b'')
                                tres = {'result': res,
                                        'time': ('%0.3f' % timesd[tc.name][2]),
                                        'cuser': ('%0.3f' % timesd[tc.name][0]),
                                        'csys': ('%0.3f' % timesd[tc.name][1]),
                                        'start': ('%0.3f' % timesd[tc.name][3]),
                                        'end': ('%0.3f' % timesd[tc.name][4]),
                                        'diff': diff.decode('unicode_escape'),
                                        }
                            else:
                                # blacklisted test
                                tres = {'result': res}

                            outcome[tc.name] = tres
                    jsonout = json.dumps(outcome, sort_keys=True, indent=4,
                                         separators=(',', ': '))
                    fp.writelines(("testreport =", jsonout))

            self._runner._checkhglib('Tested')

            savetimes(self._runner._testdir, result)

            if failed and self._runner.options.known_good_rev:
                def nooutput(args):
                    # Run a command, discarding its output.
                    p = subprocess.Popen(args, stderr=subprocess.STDOUT,
                                         stdout=subprocess.PIPE)
                    p.stdout.read()
                    p.wait()
                for test, msg in result.failures:
                    # Fix: a stray trailing comma previously made the next
                    # statement a 1-tuple expression (harmless but confusing).
                    nooutput(['hg', 'bisect', '--reset'])
                    nooutput(['hg', 'bisect', '--bad', '.'])
                    nooutput(['hg', 'bisect', '--good',
                              self._runner.options.known_good_rev])
                    # TODO: we probably need to forward some options
                    # that alter hg's behavior inside the tests.
                    rtc = '%s %s %s' % (sys.executable, sys.argv[0], test)
                    sub = subprocess.Popen(['hg', 'bisect', '--command', rtc],
                                           stderr=subprocess.STDOUT,
                                           stdout=subprocess.PIPE)
                    data = sub.stdout.read()
                    sub.wait()
                    m = re.search(
                        (r'\nThe first (?P<goodbad>bad|good) revision '
                         r'is:\nchangeset: +\d+:(?P<node>[a-f0-9]+)\n.*\n'
                         r'summary: +(?P<summary>[^\n]+)\n'),
                        data, (re.MULTILINE | re.DOTALL))
                    if m is None:
                        self.stream.writeln(
                            'Failed to identify failure point for %s' % test)
                        continue
                    dat = m.groupdict()
                    verb = 'broken' if dat['goodbad'] == 'bad' else 'fixed'
                    self.stream.writeln(
                        '%s %s by %s (%s)' % (
                            test, verb, dat['node'], dat['summary']))
            self.stream.writeln(
                '# Ran %d tests, %d skipped, %d warned, %d failed.'
                % (result.testsRun,
                   skipped + ignored, warned, failed))
            if failed:
                self.stream.writeln('python hash seed: %s' %
                                    os.environ['PYTHONHASHSEED'])
            if self._runner.options.time:
                self.printtimes(result.times)

        return result

    def printtimes(self, times):
        """Print a per-test timing table, slowest-starting last."""
        # iolock held by run
        self.stream.writeln('# Producing time report')
        times.sort(key=lambda t: (t[3]))
        cols = '%7.3f %7.3f %7.3f %7.3f %7.3f   %s'
        self.stream.writeln('%-7s %-7s %-7s %-7s %-7s   %s' %
                            ('start', 'end', 'cuser', 'csys', 'real', 'Test'))
        for tdata in times:
            test = tdata[0]
            cuser, csys, real, start, end = tdata[1:6]
            self.stream.writeln(cols % (start, end, cuser, csys, real, test))
2017 2018
class TestRunner(object):
    """Holds context for executing tests.

    Tests rely on a lot of state. This object holds it for them.
    """

    # Programs required to run tests.
    REQUIREDTOOLS = [
        os.path.basename(_bytespath(sys.executable)),
        b'diff',
        b'grep',
        b'unzip',
        b'gunzip',
        b'bunzip2',
        b'sed',
    ]

    # Maps file extensions to test class.
    TESTTYPES = [
        (b'.py', PythonTest),
        (b'.t', TTest),
    ]

    def __init__(self):
        self.options = None
        self._hgroot = None
        self._testdir = None
        self._hgtmp = None
        self._installdir = None
        self._bindir = None
        # NOTE(review): '_tmpbinddir' looks like a typo for '_tmpbindir',
        # which is the attribute actually assigned in _run(); confirm.
        self._tmpbinddir = None
        self._pythondir = None
        self._coveragefile = None
        self._createdfiles = []
        self._hgcommand = None
        self._hgpath = None
        self._portoffset = 0
        # Maps test index -> base port allocated for that test (see _getport).
        self._ports = {}

    def run(self, args, parser=None):
        """Run the test suite."""
        oldmask = os.umask(0o22)
        try:
            parser = parser or getparser()
            options, args = parseargs(args, parser)
            # positional arguments are paths to test files to run, so
            # we make sure they're all bytestrings
            args = [_bytespath(a) for a in args]
            self.options = options

            self._checktools()
            tests = self.findtests(args)
            if options.profile_runner:
                import statprof
                statprof.start()
            result = self._run(tests)
            if options.profile_runner:
                statprof.stop()
                statprof.display()
            return result

        finally:
            os.umask(oldmask)

    def _run(self, tests):
        # Set up the scheduling order, temp dirs, install layout and
        # environment variables, then delegate to _runtests().
        if self.options.random:
            random.shuffle(tests)
        else:
            # keywords for slow tests
            # (weights multiply the file-size-based sort key in sortkey below)
            slow = {b'svn': 10,
                    b'cvs': 10,
                    b'hghave': 10,
                    b'largefiles-update': 10,
                    b'run-tests': 10,
                    b'corruption': 10,
                    b'race': 10,
                    b'i18n': 10,
                    b'check': 100,
                    b'gendoc': 100,
                    b'contrib-perf': 200,
                    }
            perf = {}
            def sortkey(f):
                # run largest tests first, as they tend to take the longest
                try:
                    return perf[f]
                except KeyError:
                    try:
                        val = -os.stat(f).st_size
                    except OSError as e:
                        if e.errno != errno.ENOENT:
                            raise
                        perf[f] = -1e9 # file does not exist, tell early
                        return -1e9
                    for kw, mul in slow.items():
                        if kw in f:
                            val *= mul
                    if f.endswith(b'.py'):
                        val /= 10.0
                    perf[f] = val / 1000.0
                    return perf[f]
            tests.sort(key=sortkey)

        self._testdir = osenvironb[b'TESTDIR'] = getattr(
            os, 'getcwdb', os.getcwd)()

        if 'PYTHONHASHSEED' not in os.environ:
            # use a random python hash seed all the time
            # we do the randomness ourself to know what seed is used
            os.environ['PYTHONHASHSEED'] = str(random.getrandbits(32))

        if self.options.tmpdir:
            self.options.keep_tmpdir = True
            tmpdir = _bytespath(self.options.tmpdir)
            if os.path.exists(tmpdir):
                # Meaning of tmpdir has changed since 1.3: we used to create
                # HGTMP inside tmpdir; now HGTMP is tmpdir. So fail if
                # tmpdir already exists.
                print("error: temp dir %r already exists" % tmpdir)
                return 1

            # Automatically removing tmpdir sounds convenient, but could
            # really annoy anyone in the habit of using "--tmpdir=/tmp"
            # or "--tmpdir=$HOME".
            #vlog("# Removing temp dir", tmpdir)
            #shutil.rmtree(tmpdir)
            os.makedirs(tmpdir)
        else:
            d = None
            if os.name == 'nt':
                # without this, we get the default temp dir location, but
                # in all lowercase, which causes troubles with paths (issue3490)
                d = osenvironb.get(b'TMP', None)
            tmpdir = tempfile.mkdtemp(b'', b'hgtests.', d)

        self._hgtmp = osenvironb[b'HGTMP'] = (
            os.path.realpath(tmpdir))

        if self.options.with_hg:
            self._installdir = None
            whg = self.options.with_hg
            self._bindir = os.path.dirname(os.path.realpath(whg))
            assert isinstance(self._bindir, bytes)
            self._hgcommand = os.path.basename(whg)
            self._tmpbindir = os.path.join(self._hgtmp, b'install', b'bin')
            os.makedirs(self._tmpbindir)

            # This looks redundant with how Python initializes sys.path from
            # the location of the script being executed. Needed because the
            # "hg" specified by --with-hg is not the only Python script
            # executed in the test suite that needs to import 'mercurial'
            # ... which means it's not really redundant at all.
            self._pythondir = self._bindir
        else:
            self._installdir = os.path.join(self._hgtmp, b"install")
            self._bindir = os.path.join(self._installdir, b"bin")
            self._hgcommand = b'hg'
            self._tmpbindir = self._bindir
            self._pythondir = os.path.join(self._installdir, b"lib", b"python")

        # set CHGHG, then replace "hg" command by "chg"
        chgbindir = self._bindir
        if self.options.chg or self.options.with_chg:
            osenvironb[b'CHGHG'] = os.path.join(self._bindir, self._hgcommand)
        else:
            osenvironb.pop(b'CHGHG', None) # drop flag for hghave
        if self.options.chg:
            self._hgcommand = b'chg'
        elif self.options.with_chg:
            chgbindir = os.path.dirname(os.path.realpath(self.options.with_chg))
            self._hgcommand = os.path.basename(self.options.with_chg)

        osenvironb[b"BINDIR"] = self._bindir
        osenvironb[b"PYTHON"] = PYTHON

        if self.options.with_python3:
            osenvironb[b'PYTHON3'] = self.options.with_python3

        fileb = _bytespath(__file__)
        runtestdir = os.path.abspath(os.path.dirname(fileb))
        osenvironb[b'RUNTESTDIR'] = runtestdir
        if PYTHON3:
            sepb = _bytespath(os.pathsep)
        else:
            sepb = os.pathsep
        path = [self._bindir, runtestdir] + osenvironb[b"PATH"].split(sepb)
        if os.path.islink(__file__):
            # test helper will likely be at the end of the symlink
            realfile = os.path.realpath(fileb)
            realdir = os.path.abspath(os.path.dirname(realfile))
            path.insert(2, realdir)
        if chgbindir != self._bindir:
            path.insert(1, chgbindir)
        if self._testdir != runtestdir:
            path = [self._testdir] + path
        if self._tmpbindir != self._bindir:
            path = [self._tmpbindir] + path
        osenvironb[b"PATH"] = sepb.join(path)

        # Include TESTDIR in PYTHONPATH so that out-of-tree extensions
        # can run .../tests/run-tests.py test-foo where test-foo
        # adds an extension to HGRC. Also include run-test.py directory to
        # import modules like heredoctest.
        pypath = [self._pythondir, self._testdir, runtestdir]
        # We have to augment PYTHONPATH, rather than simply replacing
        # it, in case external libraries are only available via current
        # PYTHONPATH. (In particular, the Subversion bindings on OS X
        # are in /opt/subversion.)
        oldpypath = osenvironb.get(IMPL_PATH)
        if oldpypath:
            pypath.append(oldpypath)
        osenvironb[IMPL_PATH] = sepb.join(pypath)

        if self.options.pure:
            os.environ["HGTEST_RUN_TESTS_PURE"] = "--pure"
            os.environ["HGMODULEPOLICY"] = "py"

        if self.options.allow_slow_tests:
            os.environ["HGTEST_SLOW"] = "slow"
        elif 'HGTEST_SLOW' in os.environ:
            del os.environ['HGTEST_SLOW']

        self._coveragefile = os.path.join(self._testdir, b'.coverage')

        vlog("# Using TESTDIR", self._testdir)
        vlog("# Using RUNTESTDIR", osenvironb[b'RUNTESTDIR'])
        vlog("# Using HGTMP", self._hgtmp)
        vlog("# Using PATH", os.environ["PATH"])
        vlog("# Using", IMPL_PATH, osenvironb[IMPL_PATH])

        try:
            return self._runtests(tests) or 0
        finally:
            time.sleep(.1)
            self._cleanup()

    def findtests(self, args):
        """Finds possible test files from arguments.

        If you wish to inject custom tests into the test harness, this would
        be a good function to monkeypatch or override in a derived class.
        """
        if not args:
            if self.options.changed:
                proc = Popen4('hg st --rev "%s" -man0 .' %
                              self.options.changed, None, 0)
                stdout, stderr = proc.communicate()
                args = stdout.strip(b'\0').split(b'\0')
            else:
                args = os.listdir(b'.')

        return [t for t in args
                if os.path.basename(t).startswith(b'test-')
                and (t.endswith(b'.py') or t.endswith(b'.t'))]

    def _runtests(self, tests):
        # Install hg (if needed), build the suite and run it.
        # Returns 1 on failure, 80 on warnings, None/falsy on success.
        try:
            if self._installdir:
                self._installhg()
                self._checkhglib("Testing")
            else:
                self._usecorrectpython()
            if self.options.chg:
                assert self._installdir
                self._installchg()

            if self.options.restart:
                # Skip tests until the first one with a leftover .err file.
                orig = list(tests)
                while tests:
                    if os.path.exists(tests[0] + ".err"):
                        break
                    tests.pop(0)
                if not tests:
                    print("running all tests")
                    tests = orig

            tests = [self._gettest(t, i) for i, t in enumerate(tests)]

            failed = False
            warned = False
            kws = self.options.keywords
            if kws is not None and PYTHON3:
                kws = kws.encode('utf-8')

            suite = TestSuite(self._testdir,
                              jobs=self.options.jobs,
                              whitelist=self.options.whitelisted,
                              blacklist=self.options.blacklist,
                              retest=self.options.retest,
                              keywords=kws,
                              loop=self.options.loop,
                              runs_per_test=self.options.runs_per_test,
                              showchannels=self.options.showchannels,
                              tests=tests, loadtest=self._gettest)
            verbosity = 1
            if self.options.verbose:
                verbosity = 2
            runner = TextTestRunner(self, verbosity=verbosity)
            result = runner.run(suite)

            if result.failures:
                failed = True
            if result.warned:
                warned = True

            if self.options.anycoverage:
                self._outputcoverage()
        except KeyboardInterrupt:
            failed = True
            print("\ninterrupted!")

        if failed:
            return 1
        if warned:
            return 80

    def _getport(self, count):
        # Allocate (and cache) a base port for test number *count*;
        # each test gets a block of 3 consecutive free ports.
        port = self._ports.get(count) # do we have a cached entry?
        if port is None:
            portneeded = 3
            # above 100 tries we just give up and let test reports failure
            for tries in xrange(100):
                allfree = True
                port = self.options.port + self._portoffset
                for idx in xrange(portneeded):
                    if not checkportisavailable(port + idx):
                        allfree = False
                        break
                self._portoffset += portneeded
                if allfree:
                    break
            self._ports[count] = port
        return port

    def _gettest(self, test, count):
        """Obtain a Test by looking at its filename.

        Returns a Test instance. The Test may not be runnable if it doesn't
        map to a known type.
        """
        lctest = test.lower()
        testcls = Test

        for ext, cls in self.TESTTYPES:
            if lctest.endswith(ext):
                testcls = cls
                break

        refpath = os.path.join(self._testdir, test)
        tmpdir = os.path.join(self._hgtmp, b'child%d' % count)

        t = testcls(refpath, tmpdir,
                    keeptmpdir=self.options.keep_tmpdir,
                    debug=self.options.debug,
                    timeout=self.options.timeout,
                    startport=self._getport(count),
                    extraconfigopts=self.options.extra_config_opt,
                    py3kwarnings=self.options.py3k_warnings,
                    shell=self.options.shell,
                    hgcommand=self._hgcommand,
                    usechg=bool(self.options.with_chg or self.options.chg),
                    useipv6=useipv6)
        t.should_reload = True
        return t

    def _cleanup(self):
        """Clean up state from this test invocation."""
        if self.options.keep_tmpdir:
            return

        vlog("# Cleaning up HGTMP", self._hgtmp)
        shutil.rmtree(self._hgtmp, True)
        for f in self._createdfiles:
            try:
                os.remove(f)
            except OSError:
                pass

    def _usecorrectpython(self):
        """Configure the environment to use the appropriate Python in tests."""
        # Tests must use the same interpreter as us or bad things will happen.
        pyexename = sys.platform == 'win32' and b'python.exe' or b'python'
        if getattr(os, 'symlink', None):
            vlog("# Making python executable in test path a symlink to '%s'" %
                 sys.executable)
            mypython = os.path.join(self._tmpbindir, pyexename)
            try:
                if os.readlink(mypython) == sys.executable:
                    return
                os.unlink(mypython)
            except OSError as err:
                if err.errno != errno.ENOENT:
                    raise
            if self._findprogram(pyexename) != sys.executable:
                try:
                    os.symlink(sys.executable, mypython)
                    self._createdfiles.append(mypython)
                except OSError as err:
                    # child processes may race, which is harmless
                    if err.errno != errno.EEXIST:
                        raise
        else:
            # No symlink support (e.g. Windows): strip the real interpreter's
            # directory from PATH so our copy is found first.
            exedir, exename = os.path.split(sys.executable)
            vlog("# Modifying search path to find %s as %s in '%s'" %
                 (exename, pyexename, exedir))
            path = os.environ['PATH'].split(os.pathsep)
            while exedir in path:
                path.remove(exedir)
            os.environ['PATH'] = os.pathsep.join([exedir] + path)
            if not self._findprogram(pyexename):
                print("WARNING: Cannot find %s in search path" % pyexename)

    def _installhg(self):
        """Install hg into the test environment.

        This will also configure hg with the appropriate testing settings.
        """
        vlog("# Performing temporary installation of HG")
        installerrs = os.path.join(self._hgtmp, b"install.err")
        compiler = ''
        if self.options.compiler:
            compiler = '--compiler ' + self.options.compiler
        if self.options.pure:
            pure = b"--pure"
        else:
            pure = b""

        # Run installer in hg root
        script = os.path.realpath(sys.argv[0])
        exe = sys.executable
        if PYTHON3:
            compiler = _bytespath(compiler)
            script = _bytespath(script)
            exe = _bytespath(exe)
        hgroot = os.path.dirname(os.path.dirname(script))
        self._hgroot = hgroot
        os.chdir(hgroot)
        nohome = b'--home=""'
        if os.name == 'nt':
            # The --home="" trick works only on OS where os.sep == '/'
            # because of a distutils convert_path() fast-path. Avoid it at
            # least on Windows for now, deal with .pydistutils.cfg bugs
            # when they happen.
            nohome = b''
        cmd = (b'%(exe)s setup.py %(pure)s clean --all'
               b' build %(compiler)s --build-base="%(base)s"'
               b' install --force --prefix="%(prefix)s"'
               b' --install-lib="%(libdir)s"'
               b' --install-scripts="%(bindir)s" %(nohome)s >%(logfile)s 2>&1'
               % {b'exe': exe, b'pure': pure,
                  b'compiler': compiler,
                  b'base': os.path.join(self._hgtmp, b"build"),
                  b'prefix': self._installdir, b'libdir': self._pythondir,
                  b'bindir': self._bindir,
                  b'nohome': nohome, b'logfile': installerrs})

        # setuptools requires install directories to exist.
        def makedirs(p):
            try:
                os.makedirs(p)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
        makedirs(self._pythondir)
        makedirs(self._bindir)

        vlog("# Running", cmd)
        if os.system(cmd) == 0:
            if not self.options.verbose:
                try:
                    os.remove(installerrs)
                except OSError as e:
                    if e.errno != errno.ENOENT:
                        raise
        else:
            # Install failed: dump the captured install log and bail out.
            f = open(installerrs, 'rb')
            for line in f:
                if PYTHON3:
                    sys.stdout.buffer.write(line)
                else:
                    sys.stdout.write(line)
            f.close()
            sys.exit(1)
        os.chdir(self._testdir)

        self._usecorrectpython()

        if self.options.py3k_warnings and not self.options.anycoverage:
            vlog("# Updating hg command to enable Py3k Warnings switch")
            f = open(os.path.join(self._bindir, 'hg'), 'rb')
            lines = [line.rstrip() for line in f]
            lines[0] += ' -3'
            f.close()
            f = open(os.path.join(self._bindir, 'hg'), 'wb')
            for line in lines:
                f.write(line + '\n')
            f.close()

        hgbat = os.path.join(self._bindir, b'hg.bat')
        if os.path.isfile(hgbat):
            # hg.bat expects to be put in bin/scripts while run-tests.py
            # installation layout put it in bin/ directly. Fix it
            f = open(hgbat, 'rb')
            data = f.read()
            f.close()
            if b'"%~dp0..\python" "%~dp0hg" %*' in data:
                data = data.replace(b'"%~dp0..\python" "%~dp0hg" %*',
                                    b'"%~dp0python" "%~dp0hg" %*')
                f = open(hgbat, 'wb')
                f.write(data)
                f.close()
            else:
                print('WARNING: cannot fix hg.bat reference to python.exe')

        if self.options.anycoverage:
            custom = os.path.join(self._testdir, 'sitecustomize.py')
            target = os.path.join(self._pythondir, 'sitecustomize.py')
            vlog('# Installing coverage trigger to %s' % target)
            shutil.copyfile(custom, target)
            rc = os.path.join(self._testdir, '.coveragerc')
            vlog('# Installing coverage rc to %s' % rc)
            os.environ['COVERAGE_PROCESS_START'] = rc
            covdir = os.path.join(self._installdir, '..', 'coverage')
            try:
                os.mkdir(covdir)
            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise

            os.environ['COVERAGE_DIR'] = covdir

    def _checkhglib(self, verb):
        """Ensure that the 'mercurial' package imported by python is
        the one we expect it to be. If not, print a warning to stderr."""
        if ((self._bindir == self._pythondir) and
            (self._bindir != self._tmpbindir)):
            # The pythondir has been inferred from --with-hg flag.
            # We cannot expect anything sensible here.
            return
        expecthg = os.path.join(self._pythondir, b'mercurial')
        actualhg = self._gethgpath()
        if os.path.abspath(actualhg) != os.path.abspath(expecthg):
            sys.stderr.write('warning: %s with unexpected mercurial lib: %s\n'
                             '         (expected %s)\n'
                             % (verb, actualhg, expecthg))
    def _gethgpath(self):
        """Return the path to the mercurial package that is actually found by
        the current Python interpreter."""
        if self._hgpath is not None:
            return self._hgpath

        cmd = b'%s -c "import mercurial; print (mercurial.__path__[0])"'
        cmd = cmd % PYTHON
        if PYTHON3:
            cmd = _strpath(cmd)
        pipe = os.popen(cmd)
        try:
            self._hgpath = _bytespath(pipe.read().strip())
        finally:
            pipe.close()

        return self._hgpath

    def _installchg(self):
        """Install chg into the test environment"""
        vlog('# Performing temporary installation of CHG')
        assert os.path.dirname(self._bindir) == self._installdir
        assert self._hgroot, 'must be called after _installhg()'
        cmd = (b'"%(make)s" clean install PREFIX="%(prefix)s"'
               % {b'make': 'make', # TODO: switch by option or environment?
                  b'prefix': self._installdir})
        cwd = os.path.join(self._hgroot, b'contrib', b'chg')
        vlog("# Running", cmd)
        proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
                                stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        out, _err = proc.communicate()
        if proc.returncode != 0:
            if PYTHON3:
                sys.stdout.buffer.write(out)
            else:
                sys.stdout.write(out)
            sys.exit(1)

    def _outputcoverage(self):
        """Produce code coverage output."""
        import coverage
        coverage = coverage.coverage

        vlog('# Producing coverage report')
        # chdir is the easiest way to get short, relative paths in the
        # output.
        os.chdir(self._hgroot)
        covdir = os.path.join(self._installdir, '..', 'coverage')
        cov = coverage(data_file=os.path.join(covdir, 'cov'))

        # Map install directory paths back to source directory.
        cov.config.paths['srcdir'] = ['.', self._pythondir]

        cov.combine()

        omit = [os.path.join(x, '*') for x in [self._bindir, self._testdir]]
        cov.report(ignore_errors=True, omit=omit)

        if self.options.htmlcov:
            htmldir = os.path.join(self._testdir, 'htmlcov')
            cov.html_report(directory=htmldir, omit=omit)
        if self.options.annotate:
            adir = os.path.join(self._testdir, 'annotated')
            if not os.path.isdir(adir):
                os.mkdir(adir)
            cov.annotate(directory=adir, omit=omit)

    def _findprogram(self, program):
        """Search PATH for a executable program"""
        dpb = _bytespath(os.defpath)
        sepb = _bytespath(os.pathsep)
        for p in osenvironb.get(b'PATH', dpb).split(sepb):
            name = os.path.join(p, program)
            if os.name == 'nt' or os.access(name, os.X_OK):
                return name
        return None

    def _checktools(self):
        """Ensure tools required to run tests are present."""
        for p in self.REQUIREDTOOLS:
            if os.name == 'nt' and not p.endswith('.exe'):
                p += '.exe'
            found = self._findprogram(p)
            if found:
                vlog("# Found prerequisite", p, "at", found)
            else:
                print("WARNING: Did not find prerequisite tool: %s " %
                      p.decode("utf-8"))
2652 2653
if __name__ == '__main__':
    runner = TestRunner()

    try:
        import msvcrt
        # On Windows, put the standard streams into binary mode so test
        # output bytes are not mangled by CRLF translation.
        msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
        msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
    except ImportError:
        # msvcrt only exists on Windows; elsewhere streams are fine as-is.
        pass

    # Exit with the runner's status (0 ok, 1 failures, 80 warnings).
    sys.exit(runner.run(sys.argv[1:]))
@@ -1,166 +1,180 b''
1 1
2 2 $ cat << EOF > buggylocking.py
3 3 > """A small extension that tests our developer warnings
4 4 > """
5 5 >
6 > from mercurial import cmdutil, repair
6 > from mercurial import cmdutil, repair, util
7 7 >
8 8 > cmdtable = {}
9 9 > command = cmdutil.command(cmdtable)
10 10 >
11 11 > @command('buggylocking', [], '')
12 12 > def buggylocking(ui, repo):
13 13 > lo = repo.lock()
14 14 > wl = repo.wlock()
15 15 > wl.release()
16 16 > lo.release()
17 17 >
18 18 > @command('buggytransaction', [], '')
19 19 > def buggylocking(ui, repo):
20 20 > tr = repo.transaction('buggy')
21 21 > # make sure we roll back the transaction as we don't want to rely on the __del__
22 22 > tr.release()
23 23 >
24 24 > @command('properlocking', [], '')
25 25 > def properlocking(ui, repo):
26 26 > """check that reentrance is fine"""
27 27 > wl = repo.wlock()
28 28 > lo = repo.lock()
29 29 > tr = repo.transaction('proper')
30 30 > tr2 = repo.transaction('proper')
31 31 > lo2 = repo.lock()
32 32 > wl2 = repo.wlock()
33 33 > wl2.release()
34 34 > lo2.release()
35 35 > tr2.close()
36 36 > tr.close()
37 37 > lo.release()
38 38 > wl.release()
39 39 >
40 40 > @command('nowaitlocking', [], '')
41 41 > def nowaitlocking(ui, repo):
42 42 > lo = repo.lock()
43 43 > wl = repo.wlock(wait=False)
44 44 > wl.release()
45 45 > lo.release()
46 46 >
47 47 > @command('stripintr', [], '')
48 48 > def stripintr(ui, repo):
49 49 > lo = repo.lock()
50 50 > tr = repo.transaction('foobar')
51 51 > try:
52 52 > repair.strip(repo.ui, repo, [repo['.'].node()])
53 53 > finally:
54 54 > lo.release()
55 55 > @command('oldanddeprecated', [], '')
56 56 > def oldanddeprecated(ui, repo):
57 57 > """test deprecation warning API"""
58 58 > def foobar(ui):
59 59 > ui.deprecwarn('foorbar is deprecated, go shopping', '42.1337')
60 60 > foobar(ui)
61 > @command('nouiwarning', [], '')
62 > def nouiwarning(ui, repo):
63 > util.nouideprecwarn('this is a test', '13.37')
61 64 > EOF
62 65
63 66 $ cat << EOF >> $HGRCPATH
64 67 > [extensions]
65 68 > buggylocking=$TESTTMP/buggylocking.py
66 69 > mock=$TESTDIR/mockblackbox.py
67 70 > blackbox=
68 71 > [devel]
69 72 > all-warnings=1
70 73 > EOF
71 74
72 75 $ hg init lock-checker
73 76 $ cd lock-checker
74 77 $ hg buggylocking
75 78 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
76 79 $ cat << EOF >> $HGRCPATH
77 80 > [devel]
78 81 > all=0
79 82 > check-locks=1
80 83 > EOF
81 84 $ hg buggylocking
82 85 devel-warn: "wlock" acquired after "lock" at: $TESTTMP/buggylocking.py:* (buggylocking) (glob)
83 86 $ hg buggylocking --traceback
84 87 devel-warn: "wlock" acquired after "lock" at:
85 88 */hg:* in * (glob)
86 89 */mercurial/dispatch.py:* in run (glob)
87 90 */mercurial/dispatch.py:* in dispatch (glob)
88 91 */mercurial/dispatch.py:* in _runcatch (glob)
89 92 */mercurial/dispatch.py:* in callcatch (glob)
90 93 */mercurial/scmutil.py* in callcatch (glob)
91 94 */mercurial/dispatch.py:* in _runcatchfunc (glob)
92 95 */mercurial/dispatch.py:* in _dispatch (glob)
93 96 */mercurial/dispatch.py:* in runcommand (glob)
94 97 */mercurial/dispatch.py:* in _runcommand (glob)
95 98 */mercurial/dispatch.py:* in <lambda> (glob)
96 99 */mercurial/util.py:* in check (glob)
97 100 $TESTTMP/buggylocking.py:* in buggylocking (glob)
98 101 $ hg properlocking
99 102 $ hg nowaitlocking
100 103
101 104 $ echo a > a
102 105 $ hg add a
103 106 $ hg commit -m a
104 107 $ hg stripintr 2>&1 | egrep -v '^(\*\*| )'
105 108 saved backup bundle to $TESTTMP/lock-checker/.hg/strip-backup/*-backup.hg (glob)
106 109 Traceback (most recent call last):
107 110 mercurial.error.ProgrammingError: cannot strip from inside a transaction
108 111
109 112 $ hg oldanddeprecated
110 113 devel-warn: foorbar is deprecated, go shopping
111 114 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
112 115
113 116 $ hg oldanddeprecated --traceback
114 117 devel-warn: foorbar is deprecated, go shopping
115 118 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
116 119 */hg:* in <module> (glob)
117 120 */mercurial/dispatch.py:* in run (glob)
118 121 */mercurial/dispatch.py:* in dispatch (glob)
119 122 */mercurial/dispatch.py:* in _runcatch (glob)
120 123 */mercurial/dispatch.py:* in callcatch (glob)
121 124 */mercurial/scmutil.py* in callcatch (glob)
122 125 */mercurial/dispatch.py:* in _runcatchfunc (glob)
123 126 */mercurial/dispatch.py:* in _dispatch (glob)
124 127 */mercurial/dispatch.py:* in runcommand (glob)
125 128 */mercurial/dispatch.py:* in _runcommand (glob)
126 129 */mercurial/dispatch.py:* in <lambda> (glob)
127 130 */mercurial/util.py:* in check (glob)
128 131 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
129 132 $ hg blackbox -l 7
130 133 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated
131 134 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
132 135 (compatibility will be dropped after Mercurial-42.1337, update your code.) at: $TESTTMP/buggylocking.py:* (oldanddeprecated) (glob)
133 136 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated exited 0 after * seconds (glob)
134 137 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback
135 138 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> devel-warn: foorbar is deprecated, go shopping
136 139 (compatibility will be dropped after Mercurial-42.1337, update your code.) at:
137 140 */hg:* in <module> (glob)
138 141 */mercurial/dispatch.py:* in run (glob)
139 142 */mercurial/dispatch.py:* in dispatch (glob)
140 143 */mercurial/dispatch.py:* in _runcatch (glob)
141 144 */mercurial/dispatch.py:* in callcatch (glob)
142 145 */mercurial/scmutil.py* in callcatch (glob)
143 146 */mercurial/dispatch.py:* in _runcatchfunc (glob)
144 147 */mercurial/dispatch.py:* in _dispatch (glob)
145 148 */mercurial/dispatch.py:* in runcommand (glob)
146 149 */mercurial/dispatch.py:* in _runcommand (glob)
147 150 */mercurial/dispatch.py:* in <lambda> (glob)
148 151 */mercurial/util.py:* in check (glob)
149 152 $TESTTMP/buggylocking.py:* in oldanddeprecated (glob)
150 153 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> oldanddeprecated --traceback exited 0 after * seconds (glob)
151 154 1970/01/01 00:00:00 bob @cb9a9f314b8b07ba71012fcdbc544b5a4d82ff5b (5000)> blackbox -l 7
152 155
153 156 Test programming error failure:
154 157
155 158 $ hg buggytransaction 2>&1 | egrep -v '^ '
156 159 ** Unknown exception encountered with possibly-broken third-party extension buggylocking
157 160 ** which supports versions unknown of Mercurial.
158 161 ** Please disable buggylocking and try your action again.
159 162 ** If that fixes the bug please report it to the extension author.
160 163 ** Python * (glob)
161 164 ** Mercurial Distributed SCM (*) (glob)
162 165 ** Extensions loaded: * (glob)
163 166 Traceback (most recent call last):
164 167 mercurial.error.ProgrammingError: transaction requires locking
165 168
169 Old style deprecation warning
170
171 $ hg nouiwarning
172 $TESTTMP/buggylocking.py:61: DeprecationWarning: this is a test
173 (compatibility will be dropped after Mercurial-13.37, update your code.)
174 util.nouideprecwarn('this is a test', '13.37')
175
176 (disabled outside of test run)
177
178 $ HGEMITWARNINGS= hg nouiwarning
179
166 180 $ cd ..
General Comments 0
You need to be logged in to leave comments. Login now