##// END OF EJS Templates
util: rename argument of isatty()...
Yuya Nishihara -
r27363:c7ab2087 default
parent child Browse files
Show More
@@ -1,2504 +1,2504
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 from __future__ import absolute_import
17 17
18 18 import bz2
19 19 import calendar
20 20 import collections
21 21 import datetime
22 22 import errno
23 23 import gc
24 24 import hashlib
25 25 import imp
26 26 import os
27 27 import re as remod
28 28 import shutil
29 29 import signal
30 30 import socket
31 31 import subprocess
32 32 import sys
33 33 import tempfile
34 34 import textwrap
35 35 import time
36 36 import traceback
37 37 import urllib
38 38 import zlib
39 39
40 40 from . import (
41 41 encoding,
42 42 error,
43 43 i18n,
44 44 osutil,
45 45 parsers,
46 46 )
47 47
48 48 if os.name == 'nt':
49 49 from . import windows as platform
50 50 else:
51 51 from . import posix as platform
52 52
# digest constructors re-exported from hashlib for convenience
md5 = hashlib.md5
sha1 = hashlib.sha1
sha512 = hashlib.sha512
# gettext shorthand used for all translatable messages in this module
_ = i18n._

# Re-export the platform-specific implementations (posix or windows,
# selected above) under stable names so the rest of the codebase can use
# util.<name> without caring about the OS.
cachestat = platform.cachestat
checkexec = platform.checkexec
checklink = platform.checklink
copymode = platform.copymode
executablepath = platform.executablepath
expandglobs = platform.expandglobs
explainexit = platform.explainexit
findexe = platform.findexe
gethgcmd = platform.gethgcmd
getuser = platform.getuser
groupmembers = platform.groupmembers
groupname = platform.groupname
hidewindow = platform.hidewindow
isexec = platform.isexec
isowner = platform.isowner
localpath = platform.localpath
lookupreg = platform.lookupreg
makedir = platform.makedir
nlinks = platform.nlinks
normpath = platform.normpath
normcase = platform.normcase
normcasespec = platform.normcasespec
normcasefallback = platform.normcasefallback
openhardlinks = platform.openhardlinks
oslink = platform.oslink
parsepatchoutput = platform.parsepatchoutput
pconvert = platform.pconvert
poll = platform.poll
popen = platform.popen
posixfile = platform.posixfile
quotecommand = platform.quotecommand
readpipe = platform.readpipe
rename = platform.rename
removedirs = platform.removedirs
samedevice = platform.samedevice
samefile = platform.samefile
samestat = platform.samestat
setbinary = platform.setbinary
setflags = platform.setflags
setsignalhandler = platform.setsignalhandler
shellquote = platform.shellquote
spawndetached = platform.spawndetached
split = platform.split
sshargs = platform.sshargs
# prefer the C implementation from osutil when it provides one
statfiles = getattr(osutil, 'statfiles', platform.statfiles)
statisexec = platform.statisexec
statislink = platform.statislink
termwidth = platform.termwidth
testpid = platform.testpid
umask = platform.umask
unlink = platform.unlink
unlinkpath = platform.unlinkpath
username = platform.username

# Python compatibility

# unique sentinel distinguishing "attribute missing" from any real value
_notset = object()

# disable Python's problematic floating point timestamps (issue4836)
# (Python hypocritically says you shouldn't change this behavior in
# libraries, and sure enough Mercurial is not a library.)
os.stat_float_times(False)
120 120
def safehasattr(thing, attr):
    # getattr with a unique sentinel instead of hasattr(), which on
    # Python 2 swallows any exception raised during the attribute lookup
    return getattr(thing, attr, _notset) is not _notset
123 123
# name -> constructor map of the digest algorithms supported by digester
DIGESTS = {
    'md5': md5,
    'sha1': sha1,
    'sha512': sha512,
}
# List of digest types from strongest to weakest
DIGESTS_BY_STRENGTH = ['sha512', 'sha1', 'md5']

# sanity check: every entry of the strength ranking must be a known digest
for k in DIGESTS_BY_STRENGTH:
    assert k in DIGESTS
class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        """digests: iterable of digest names; s: optional initial data"""
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        """feed data to every configured digest"""
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        """return the hex digest accumulated so far for digest 'key'"""
        if key not in DIGESTS:
            # report 'key', not the stale module-level loop variable 'k'
            # that the original error message accidentally interpolated
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
181 181
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh                  # wrapped file-like object
        self._size = size              # expected total byte count
        self._got = 0                  # bytes read so far
        self._digests = dict(digests)  # digest name -> expected hex value
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        # everything read through us is also fed to the digester
        content = self._fh.read(length)
        self._digester.update(content)
        self._got += len(content)
        return content

    def validate(self):
        """raise Abort unless the size and every digest match

        Only meaningful once all expected data has been read."""
        if self._size != self._got:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for k, v in self._digests.items():
            if v != self._digester[k]:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (k, v, self._digester[k]))
213 213
# 'buffer' does not exist on Python 3: fall back to a function returning a
# suffix of the sliceable (a plain copy on py2, a zero-copy memoryview on py3)
try:
    buffer = buffer
except NameError:
    if sys.version_info[0] < 3:
        def buffer(sliceable, offset=0):
            return sliceable[offset:]
    else:
        def buffer(sliceable, offset=0):
            return memoryview(sliceable)[offset:]

# default for subprocess' close_fds; only enabled on posix (close_fds has
# restrictions with redirected std handles on Windows -- see subprocess docs)
closefds = os.name == 'posix'

# read granularity used by bufferedinputpipe._fillbuffer
_chunksize = 4096
227 227
class bufferedinputpipe(object):
    """a manually buffered input pipe

    Python will not let us use buffered IO and lazy reading with 'polling' at
    the same time. We cannot probe the buffer state and select will not detect
    that data are ready to read if they are already buffered.

    This class let us work around that by implementing its own buffering
    (allowing efficient readline) while offering a way to know if the buffer is
    empty from the output (allowing collaboration of the buffer with polling).

    This class lives in the 'util' module because it makes use of the 'os'
    module from the python stdlib.
    """

    def __init__(self, input):
        self._input = input   # underlying pipe-like object
        self._buffer = []     # list of buffered string chunks
        self._eof = False     # True once os.read returned ''
        self._lenbuf = 0      # total length of all chunks in _buffer

    @property
    def hasbuffer(self):
        """True if any data is currently buffered

        This will be used externally a pre-step for polling IO. If there is
        already data then no polling should be set in place."""
        return bool(self._buffer)

    @property
    def closed(self):
        return self._input.closed

    def fileno(self):
        return self._input.fileno()

    def close(self):
        return self._input.close()

    def read(self, size):
        # keep filling until we have 'size' bytes or hit end of file
        while (not self._eof) and (self._lenbuf < size):
            self._fillbuffer()
        return self._frombuffer(size)

    def readline(self, *args, **kwargs):
        if 1 < len(self._buffer):
            # this should not happen because both read and readline end with a
            # _frombuffer call that collapse it.
            self._buffer = [''.join(self._buffer)]
            self._lenbuf = len(self._buffer[0])
        # lfi: index of the first '\n' in the most recent chunk, or -1
        lfi = -1
        if self._buffer:
            lfi = self._buffer[-1].find('\n')
        while (not self._eof) and lfi < 0:
            self._fillbuffer()
            if self._buffer:
                lfi = self._buffer[-1].find('\n')
        # number of bytes up to and including the newline
        size = lfi + 1
        if lfi < 0: # end of file
            size = self._lenbuf
        elif 1 < len(self._buffer):
            # we need to take previous chunks into account
            size += self._lenbuf - len(self._buffer[-1])
        return self._frombuffer(size)

    def _frombuffer(self, size):
        """return at most 'size' data from the buffer

        The data are removed from the buffer."""
        if size == 0 or not self._buffer:
            return ''
        buf = self._buffer[0]
        if 1 < len(self._buffer):
            # collapse all chunks into one string before slicing
            buf = ''.join(self._buffer)

        data = buf[:size]
        buf = buf[len(data):]
        if buf:
            # leftover bytes stay buffered as a single chunk
            self._buffer = [buf]
            self._lenbuf = len(buf)
        else:
            self._buffer = []
            self._lenbuf = 0
        return data

    def _fillbuffer(self):
        """read data to the buffer"""
        data = os.read(self._input.fileno(), _chunksize)
        if not data:
            self._eof = True
        else:
            self._lenbuf += len(data)
            self._buffer.append(data)
321 321
def popen2(cmd, env=None, newlines=False):
    '''spawn cmd through the shell and return its (stdin, stdout) pipes

    Setting bufsize to -1 lets the system decide the buffer size.
    The default for bufsize is 0, meaning unbuffered. This leads to
    poor performance on Mac OS X: http://bugs.python.org/issue4194
    '''
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
332 332
def popen3(cmd, env=None, newlines=False):
    '''like popen4, but only expose the three std stream pipes'''
    return popen4(cmd, env, newlines)[:3]
336 336
def popen4(cmd, env=None, newlines=False, bufsize=-1):
    '''spawn cmd through the shell with stdin/stdout/stderr all piped

    Returns (stdin, stdout, stderr, Popen object).
    '''
    proc = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
345 345
def version():
    """Return version information if available."""
    try:
        from . import __version__
    except ImportError:
        # no generated version module (e.g. running from a raw checkout)
        return 'unknown'
    return __version__.version
353 353
def versiontuple(v=None, n=4):
    """Parses a Mercurial version string into an N-tuple.

    The version string to be parsed is specified with the ``v`` argument.
    If it isn't defined, the current Mercurial version string will be parsed.

    ``n`` can be 2, 3, or 4. Here is how some version strings map to
    returned values:

    >>> v = '3.6.1+190-df9b73d2d444'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, 1)
    >>> versiontuple(v, 4)
    (3, 6, 1, '190-df9b73d2d444')

    >>> versiontuple('3.6.1+190-df9b73d2d444+20151118')
    (3, 6, 1, '190-df9b73d2d444+20151118')

    >>> v = '3.6'
    >>> versiontuple(v, 2)
    (3, 6)
    >>> versiontuple(v, 3)
    (3, 6, None)
    >>> versiontuple(v, 4)
    (3, 6, None, None)
    """
    if not v:
        v = version()
    # the "extra" part is everything after the first '+', verbatim
    if '+' in v:
        vparts, extra = v.split('+', 1)
    else:
        vparts, extra = v, None

    # collect leading integer components; stop at the first non-integer
    vints = []
    for part in vparts.split('.'):
        try:
            vints.append(int(part))
        except ValueError:
            break
    # pad with None so indexes 0..2 are always valid, e.g. (3, 6) -> (3, 6, None)
    vints += [None] * (3 - len(vints))

    if n == 2:
        return (vints[0], vints[1])
    if n == 3:
        return (vints[0], vints[1], vints[2])
    if n == 4:
        return (vints[0], vints[1], vints[2], extra)
406 406
# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', #  GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

# coarser, date-only formats layered on top of the defaults; presumably
# accepted where a bare year or month makes sense (date ranges) -- see
# the parsedate callers for the exact contexts
extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )
441 441
def cachefunc(func):
    '''cache the result of function calls

    Results are memoized forever, keyed on the positional arguments.
    func.__code__ is used instead of the Python 2-only func.func_code
    (__code__ is an alias since 2.6), keeping this helper working on
    Python 3 as well.
    '''
    # XXX doesn't handle keywords args
    if func.__code__.co_argcount == 0:
        # nullary function: a one-slot list is the cheapest cache
        cache = []
        def f():
            if len(cache) == 0:
                cache.append(func())
            return cache[0]
        return f
    cache = {}
    if func.__code__.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
467 467
class sortdict(dict):
    '''a simple sorted dictionary

    Keys iterate in insertion order; re-setting an existing key moves it
    to the end of that order.  keys(), items() and friends all follow it.
    '''
    def __init__(self, data=None):
        self._list = []  # keys in insertion order
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        if key in self:
            # re-insertion moves the key to the back of the order
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        if isinstance(src, dict):
            src = src.iteritems()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        # must return the popped (or default) value, matching dict.pop;
        # the previous version dropped it and always returned None
        value = dict.pop(self, key, *args, **kwargs)
        try:
            self._list.remove(key)
        except ValueError:
            # key was absent and a default was supplied
            pass
        return value
    def keys(self):
        return self._list
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
512 512
class lrucachedict(object):
    '''cache most recent gets from or sets to this dictionary

    Holds at most maxsize entries; both reads and writes refresh an
    entry's recency, and the least recently used entry is evicted to
    make room for a new key.
    '''
    def __init__(self, maxsize):
        self._cache = {}
        self._maxsize = maxsize
        self._order = collections.deque()  # LRU at the left end

    def __getitem__(self, key):
        value = self._cache[key]
        # a hit makes the key most-recently-used
        self._order.remove(key)
        self._order.append(key)
        return value

    def __setitem__(self, key, value):
        if key in self._cache:
            # overwrite: just refresh the key's recency
            self._order.remove(key)
        elif len(self._cache) >= self._maxsize:
            # full: evict the least recently used entry
            del self._cache[self._order.popleft()]
        self._cache[key] = value
        self._order.append(key)

    def __contains__(self, key):
        return key in self._cache

    def clear(self):
        self._cache.clear()
        self._order = collections.deque()
541 541
def lrucachefunc(func):
    '''cache most recent results of function calls

    The cache is keyed on positional arguments and bounded (entries are
    discarded least-recently-used first once it holds more than 20).
    func.__code__ replaces the Python 2-only func.func_code (__code__ is
    an alias since 2.6), so this also works on Python 3.
    '''
    cache = {}
    order = collections.deque()  # LRU key at the left end
    if func.__code__.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                # cache hit: refresh recency
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f
568 568
class propertycache(object):
    '''decorator: compute an attribute on first access, then cache it

    Because this descriptor defines no __set__, the value stored in the
    instance __dict__ shadows the descriptor on subsequent lookups, so
    func runs at most once per instance (standard non-data descriptor
    precedence).
    '''

    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        result = self.func(obj)
        self.cachevalue(obj, result)
        return result

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
581 581
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    stdout, _stderr = proc.communicate(s)
    return stdout
588 588
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        # write the input to a temp file the command can read
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        # pre-create the output file; the command overwrites it
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            # an odd exit status means success on OpenVMS
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        fp = open(outname, 'rb')
        r = fp.read()
        fp.close()
        return r
    finally:
        # best-effort removal of both temporary files
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
625 625
# command-prefix -> filter implementation; dispatched by filter()
filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }
630 630
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on a known scheme prefix; default to a plain shell pipe
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            return fn(s, cmd[len(prefix):].lstrip())
    return pipefilter(s, cmd)
637 637
def binary(s):
    """return true if a string is binary data"""
    # a NUL byte anywhere marks the content as binary; empty/None is not
    return bool(s) and '\0' in s
641 641
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def highestbit(x):
        # index of the highest set bit of x, or 0 when x is 0
        if not x:
            return 0
        n = 0
        while x:
            x >>= 1
            n += 1
        return n - 1

    pending = []
    pendingsize = 0
    for chunk in source:
        pending.append(chunk)
        pendingsize += len(chunk)
        if pendingsize < min:
            continue
        if min < max:
            # double the threshold, or jump straight to the largest
            # power of two not exceeding what we actually buffered,
            # capping at max
            min = min << 1
            floorpow2 = 1 << highestbit(pendingsize)
            if floorpow2 > min:
                min = floorpow2
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendingsize = 0
    if pending:
        yield ''.join(pending)
672 672
# canonical abort exception, re-exported for convenience
Abort = error.Abort

def always(fn):
    # match-everything predicate (counterpart of never)
    return True

def never(fn):
    # match-nothing predicate (counterpart of always)
    return False
680 680
def nogc(func):
    """decorator that runs func with the garbage collector disabled

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue has been fixed in 2.7.
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the caller had it on
            if wasenabled:
                gc.enable()
    return wrapper
702 702
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        # no starting point: n2 (relative to root) is the answer
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            # different drives (Windows): no relative path exists
            return os.path.join(root, localpath(n2))
        # anchor n2 to root so both sides are absolute
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common leading components of both paths
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    # climb out of what remains of n1, then descend into n2
    return os.sep.join((['..'] * len(a)) + b) or '.'
728 728
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    if safehasattr(sys, "frozen"):     # new py2exe
        return True
    if safehasattr(sys, "importers"):  # old py2exe
        return True
    return imp.is_frozen("__main__")   # tools/freeze
738 738
# the location of data files matching the source code
if mainfrozen():
    # executable version (py2exe) doesn't support __file__
    datapath = os.path.dirname(sys.executable)
else:
    datapath = os.path.dirname(__file__)

# point gettext at the catalogs shipped alongside the code
i18n.setdatapath(datapath)

# cached path of the 'hg' executable; populated lazily by hgexecutable()
_hgexecutable = None
749 749
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        mainmod = sys.modules['__main__']
        if hg:
            # explicit $HG override wins
            _sethgexecutable(hg)
        elif mainfrozen():
            # frozen binary: the running executable *is* hg
            _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            # invoked via an 'hg' script: reuse that script
            _sethgexecutable(mainmod.__file__)
        else:
            # fall back to the search path, then to argv[0]'s basename
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
768 768
def _sethgexecutable(path):
    """set location of the 'hg' executable (module-level cache)"""
    global _hgexecutable
    _hgexecutable = path
773 773
774 774 def _isstdout(f):
775 775 fileno = getattr(f, 'fileno', None)
776 776 return fileno and fileno() == sys.__stdout__.fileno()
777 777
def system(cmd, environ=None, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status, else raise onerr
    object as exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    if environ is None:
        environ = {}
    try:
        # flush our own pending output before the child writes anything
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9' and (sys.version_info[0] == 2
                                    and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or _isstdout(out):
            # child can inherit our stdout directly
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # capture combined stdout/stderr and copy it line by line
            # into the supplied file-like object
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            while True:
                line = proc.stdout.readline()
                if not line:
                    break
                out.write(line)
            proc.wait()
            rc = proc.returncode
        if sys.platform == 'OpenVMS' and rc & 1:
            # an odd exit status means success on OpenVMS
            rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        raise onerr(errmsg)
    return rc
836 836
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            # a traceback depth of 1 means the TypeError came from the
            # call itself (wrong argument count/names), not from code
            # inside func -- only then translate it
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check
848 848
def copyfile(src, dest, hardlink=False):
    "copy a file, preserving mode and atime/mtime"
    if os.path.lexists(dest):
        unlink(dest)
    # hardlinks are problematic on CIFS, quietly ignore this flag
    # until we find a way to work around it cleanly (issue4546)
    if False and hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the symlink instead of copying the target's content
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copymode(src, dest)
        except shutil.Error as inst:
            raise Abort(str(inst))
869 869
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible.

    Returns (hardlink, num): whether hardlinking was still viable at the
    end, and the number of files processed."""
    num = 0

    if hardlink is None:
        # hardlinks only work within a single filesystem (same st_dev)
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)
    if hardlink:
        topic = _('linking')
    else:
        topic = _('copying')

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # shift child progress by the files already handled here
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # once hardlinking fails, stop trying it for later files
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
906 906
_winreservednames = '''con prn aux nul
com1 com2 com3 com4 com5 com6 com7 com8 com9
lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
_winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    # examine each path component separately
    for part in path.replace('\\', '/').split('/'):
        if not part:
            continue
        for c in part:
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # device names are reserved regardless of any extension
        base = part.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        # trailing dot or space is rejected, but the substring test
        # deliberately lets '.' and '..' through (both occur in '..')
        last = part[-1]
        if last in '. ' and part not in '..':
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % last
957 957
# validate filenames with the Windows rules on Windows, otherwise defer
# to the platform module's checker
if os.name == 'nt':
    checkosfilename = checkwinfilename
else:
    checkosfilename = platform.checkosfilename
962 962
def makelock(info, pathname):
    '''create a lock at pathname whose content/target is info

    A symlink (target = info) is preferred; any symlink failure other
    than "already exists" falls through to an exclusively-created
    regular file holding info.
    '''
    try:
        return os.symlink(info, pathname)
    except OSError as why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    # O_EXCL guarantees we fail rather than clobber an existing lock
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
975 975
def readlock(pathname):
    '''read the info stored in a lock created by makelock'''
    try:
        return os.readlink(pathname)
    except OSError as why:
        # EINVAL: not a symlink, so it must be a plain lock file
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
988 988
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        fd = fp.fileno()
    except AttributeError:
        # no fileno(): fall back to stat()ing the file by name
        return os.stat(fp.name)
    return os.fstat(fd)
995 995
996 996 # File system features
997 997
def checkcase(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.lstat(path)
    d, b = os.path.split(path)
    # flip the case of the final component
    b2 = b.upper()
    if b == b2:
        b2 = b.lower()
        if b == b2:
            return True # no evidence against case sensitivity
    p2 = os.path.join(d, b2)
    try:
        # if the case-flipped sibling stats identically, the filesystem
        # folded the case for us -> case-insensitive
        s2 = os.lstat(p2)
        if s2 == s1:
            return False
        return True
    except OSError:
        return True
1020 1020
# Optional google-re2 support: _re2 is a tri-state flag -- None means
# "re2 imported but not yet validated by _re._checkre2()", False means
# "unavailable or unusable".
try:
    import re2
    _re2 = None
except ImportError:
    _re2 = False
1026 1026
class _re(object):
    """Dispatch regexp operations to re2 when it is usable, else re."""

    def _checkre2(self):
        # resolve the module-level tri-state _re2 flag to a boolean
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes its flags inline rather than as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 lacks; fall back to re
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
1069 1069
# module-level facade: use util.re.compile()/util.re.escape to get re2
# acceleration transparently when it is available
re = _re()

# cache for fspath(): directory path -> {normcased name: on-disk name}
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        # map normcased entry name -> on-disk spelling for one directory
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # str.replace returns a new string, so the result must be kept:
    # otherwise, on platforms where os.sep is '\\', the unescaped
    # backslash acts as an escape inside the character classes below
    # and backslash separators are never matched.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
1114 1114
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    probe = testfile + ".hgtmp1"
    if os.path.lexists(probe):
        return False
    try:
        posixfile(probe, 'w').close()
    except IOError:
        return False

    mirror = testfile + ".hgtmp2"
    fh = None
    try:
        oslink(probe, mirror)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fh = posixfile(mirror)
        return nlinks(mirror) > 1
    except OSError:
        return False
    finally:
        if fh is not None:
            fh.close()
        # best-effort cleanup of both scratch files
        for tmp in (probe, mirror):
            try:
                os.unlink(tmp)
            except OSError:
                pass
1146 1146
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    return os.altsep and path.endswith(os.altsep)
1150 1150
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)
1158 1158
def gui():
    '''Are we running in a GUI?'''
    if sys.platform != 'darwin':
        return os.name == "nt" or os.environ.get("DISPLAY")
    if 'SSH_CONNECTION' in os.environ:
        # handle SSH access to a box where the user is logged in
        return False
    elif getattr(osutil, 'isgui', None):
        # check if a CoreGraphics session is available
        return osutil.isgui()
    else:
        # pure build; use a safe default
        return True
1173 1173
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    # create the temp file in the same directory -- presumably so a
    # later rename over `name` stays on one filesystem (see
    # atomictempfile); TODO confirm against callers
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want.  If the original file already exists, just copy
    # its mode.  Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError as inst:
            # a missing source simply yields an empty temp file
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # bare except on purpose: never leave the temp file behind,
        # whatever went wrong (including KeyboardInterrupt)
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
1212 1212
class atomictempfile(object):
    '''writable file object that atomically updates a file

    Writes are collected in a temporary copy of the target file; only a
    successful close() renames that copy over the original, making the
    changes visible in one step.  Destroying the object without closing
    it throws all pending writes away.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # expose the underlying file's I/O entry points directly
        for delegated in ('write', 'seek', 'tell', 'fileno'):
            setattr(self, delegated, getattr(self._fp, delegated))

    def close(self):
        if not self._fp.closed:
            self._fp.close()
            # publish the new content atomically
            rename(self._tempname, localpath(self.__name))

    def discard(self):
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
1250 1250
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    When ``mode`` is given, each directory created here is chmod-ed to
    it; a pre-existing directory returns early, before any chmod.
    """
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        # missing parent: build the ancestry, then retry this leaf
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the top; nothing more we can create
            raise
        makedirs(parent, mode, notindexed)
        makedir(name, notindexed)
    if mode is not None:
        os.chmod(name, mode)
1267 1267
def ensuredirs(name, mode=None, notindexed=False):
    """race-safe recursive directory creation

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    if os.path.isdir(name):
        return
    parent = os.path.dirname(os.path.abspath(name))
    if parent != name:
        ensuredirs(parent, mode, notindexed)
    try:
        makedir(name, notindexed)
    except OSError as err:
        if err.errno != errno.EEXIST or not os.path.isdir(name):
            raise
        # someone else seems to have won a directory creation race
        return
    if mode is not None:
        os.chmod(name, mode)
1289 1289
def readfile(path):
    """return the whole binary content of the file at path"""
    with open(path, 'rb') as fp:
        return fp.read()
1296 1296
def writefile(path, text):
    """replace the content of the file at path with text (binary)"""
    with open(path, 'wb') as fp:
        fp.write(text)
1303 1303
def appendfile(path, text):
    """append text (binary) to the file at path"""
    with open(path, 'ab') as fp:
        fp.write(text)
1310 1310
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # chunks over 1MB are resliced into 256kB pieces so a
            # partial read never pins a huge string
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = collections.deque()
        # read offset into the chunk at the head of the queue
        self._chunkoffset = 0

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        if l is None:
            # Drain buffered chunks first so data queued by a previous
            # partial read() is not silently lost, then exhaust the
            # underlying iterator.
            queue = self._queue
            if not queue:
                return ''.join(self.iter)
            buf = [queue.popleft()[self._chunkoffset:]]
            buf.extend(queue)
            queue.clear()
            self._chunkoffset = 0
            buf.append(''.join(self.iter))
            return ''.join(buf)

        left = l
        buf = []
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            # The easy way to do this would be to queue.popleft(), modify the
            # chunk (if necessary), then queue.appendleft(). However, for cases
            # where we read partial chunk content, this incurs 2 dequeue
            # mutations and creates a new str for the remaining chunk in the
            # queue. Our code below avoids this overhead.

            chunk = queue[0]
            chunkl = len(chunk)
            offset = self._chunkoffset

            # Use full chunk.
            if offset == 0 and left >= chunkl:
                left -= chunkl
                queue.popleft()
                buf.append(chunk)
                # self._chunkoffset remains at 0.
                continue

            chunkremaining = chunkl - offset

            # Use all of unconsumed part of chunk.
            if left >= chunkremaining:
                left -= chunkremaining
                queue.popleft()
                # offset == 0 is enabled by block above, so this won't merely
                # copy via ``chunk[0:]``.
                buf.append(chunk[offset:])
                self._chunkoffset = 0

            # Partial chunk needed.
            else:
                buf.append(chunk[offset:offset + left])
                self._chunkoffset += left
                left -= chunkremaining

        return ''.join(buf)
1391 1391
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        nbytes = size if limit is None else min(limit, size)
        # a zero-byte request (exhausted limit) reads '' and ends the loop
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1412 1412
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.'''
    if timestamp is None:
        timestamp = time.time()
    if timestamp < 0:
        raise Abort(_("negative timestamp: %d") % timestamp,
                    hint=_("check your clock"))
    # offset west of UTC in seconds: difference between the naive UTC
    # and naive local renderings of the same instant
    utcdt = datetime.datetime.utcfromtimestamp(timestamp)
    localdt = datetime.datetime.fromtimestamp(timestamp)
    delta = utcdt - localdt
    return timestamp, delta.days * 86400 + delta.seconds
1425 1425
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format or "%z" in format:
        # %1/%2 are the signed hours and the minutes of the offset;
        # a positive tz means west of UTC, rendered with a '-' sign
        sign = "-" if tz > 0 else "+"
        zhours, zmins = divmod(abs(tz) // 60, 60)
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, zhours))
        format = format.replace("%2", "%02d" % zmins)
    try:
        t = time.gmtime(float(t) - tz)
    except ValueError:
        # time was out of range
        t = time.gmtime(sys.maxint)
    return time.strftime(format, t)
1449 1449
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')
1453 1453
def parsetimezone(tz):
    """parse a timezone string and return an offset integer

    The offset follows the local convention used elsewhere in this
    module: seconds *west* of UTC, so '+0100' yields -3600.  Returns
    None when tz is not a recognized timezone spelling.
    """
    if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
        sign = 1 if tz[0] == "+" else -1
        hours, minutes = int(tz[1:3]), int(tz[3:5])
        return -sign * (hours * 60 + minutes) * 60
    if tz in ("GMT", "UTC"):
        return 0
    return None
1464 1464
def strdate(string, format, defaults=None):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    defaults maps each time-element group ("S", "M", "HI", "d", "mb",
    "yY") to a (biased, today) pair of strings used to fill in the
    elements missing from format.
    """
    # the historical default was a mutable (and wrongly typed) [] even
    # though the value is consulted as a mapping below; use the None
    # sentinel idiom with an empty dict instead
    if defaults is None:
        defaults = {}
    # NOTE: unixtime = localunixtime + offset
    offset, date = parsetimezone(string.split()[-1]), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1494 1494
def parsedate(date, formats=None, bias=None):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    if bias is None:
        bias = {}
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        # already parsed: pass through unchanged
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    # symbolic dates, in the current locale or in English
    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fast path: "unixtime offset"
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format in turn; the for-else raises only
        # when none of them parsed the date
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1573 1573
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # round unspecified fields down (Jan 1st, 00:00:00)
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # round unspecified fields up (Dec 31st, 23:59:59), trying the
        # longest month lengths first and falling back to 28 days
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # '-N': within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_('%s must be nonnegative (see "hg help dates")')
                % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # 'DATE to DATE': inclusive range
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # plain date: match anywhere within its stated precision
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1649 1649
def stringmatcher(pattern):
    """
    accepts a string, possibly starting with 're:' or 'literal:' prefix.
    returns the matcher name, pattern, and matcher function.
    missing or unknown prefixes are treated as literal matches.

    helper for tests:
    >>> def test(pattern, *tests):
    ...     kind, pattern, matcher = stringmatcher(pattern)
    ...     return (kind, pattern, [bool(matcher(t)) for t in tests])

    exact matching (no prefix):
    >>> test('abcdefg', 'abc', 'def', 'abcdefg')
    ('literal', 'abcdefg', [False, False, True])

    regex matching ('re:' prefix)
    >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
    ('re', 'a.+b', [False, False, True])

    force exact matches ('literal:' prefix)
    >>> test('literal:re:foobar', 'foobar', 're:foobar')
    ('literal', 're:foobar', [False, True])

    unknown prefixes are ignored and treated as literals
    >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
    ('literal', 'foo:bar', [False, False, True])
    """
    if pattern.startswith('re:'):
        pattern = pattern[3:]
        try:
            regex = remod.compile(pattern)
        except remod.error as e:
            raise error.ParseError(_('invalid regular expression: %s')
                                   % e)
        return 're', pattern, regex.search
    elif pattern.startswith('literal:'):
        pattern = pattern[8:]
    # no (or unknown) prefix: exact string equality
    return 'literal', pattern, pattern.__eq__
1688 1688
def shortuser(user):
    """Return a short representation of a user name or email address."""
    user = user.split('@', 1)[0]   # drop the mail domain
    user = user.split('<', 1)[-1]  # drop a "Real Name <" prefix
    user = user.split(' ', 1)[0]   # keep the first word only
    user = user.split('.', 1)[0]   # keep the part before the first dot
    return user
1704 1704
def emailuser(user):
    """Return the user portion of an email address."""
    user = user.split('@', 1)[0]   # drop the mail domain
    return user.split('<', 1)[-1]  # drop a "Real Name <" prefix
1714 1714
def email(author):
    '''get email of author.'''
    start = author.find('<') + 1   # 0 when there is no '<'
    end = author.find('>')
    if end == -1:
        end = None
    return author[start:end]
1721 1721
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display."""
    # delegate to encoding.trim with '...' as the ellipsis marker
    return encoding.trim(text, maxlength, ellipsis='...')
1725 1725
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity

    unittable rows are (multiplier, divisor, format) tuples, tried in
    order; the first row whose threshold (multiplier * divisor) does
    not exceed count wins.  The last row's format is the fallback,
    applied to the raw count.
    '''

    def render(count):
        for multiplier, divisor, fmt in unittable:
            if count >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # smaller than every threshold: use the final (base) unit
        return unittable[-1][2] % count

    return render
1736 1736
# render an integer byte count as a human-readable string, keeping at
# most three significant digits (e.g. "1.23 MB", "12.3 MB", "123 MB")
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1749 1749
def uirepr(s):
    """repr() for user display: collapse the doubled backslashes that
    repr() produces for Windows paths."""
    r = repr(s)
    return r.replace('\\\\', '\\')
1753 1753
1754 1754 # delay import of textwrap
def MBTextWrapper(**kwargs):
    """Build a width-aware TextWrapper.

    The wrapper class is created lazily (delaying the textwrap import
    cost) and this factory replaces itself with the class in the module
    namespace on first call.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def _cutdown(self, ucstr, space_left):
            # split ucstr at the largest prefix fitting in space_left
            # display columns
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of Python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # cache the class by replacing this factory in the module namespace
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1857 1857
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap ``line`` to ``width`` display columns.

    Inputs are byte strings in the local encoding; the wrapped result
    is re-encoded to that encoding.  ``initindent`` and ``hangindent``
    prefix the first and subsequent lines respectively.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(encoding.encoding, encoding.encodingmode)

    def _unicodal(s):
        return s.decode(encoding.encoding, encoding.encodingmode)

    wrapper = MBTextWrapper(width=width,
                            initial_indent=_unicodal(initindent),
                            subsequent_indent=_unicodal(hangindent))
    return wrapper.fill(line).encode(encoding.encoding)
1870 1870
def iterlines(iterator):
    """yield every line contained in an iterable of text chunks"""
    for block in iterator:
        lines = block.splitlines()
        for singleline in lines:
            yield singleline
1875 1875
def expandpath(path):
    """expand $VAR and ~user constructs in path"""
    expanded = os.path.expandvars(path)
    return os.path.expanduser(expanded)
1878 1878
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    return [sys.executable] if mainfrozen() else gethgcmd()
1889 1889
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the child; stores the (pid, status) tuple from os.wait()
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        # NOTE(review): terminated holds (pid, status) tuples, so the
        # bare 'pid in terminated' test below seemingly never matches;
        # death is effectively detected by testpid().  Consider storing
        # os.wait()[0] instead -- confirm before changing.
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # always restore the previous SIGCHLD disposition
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1924 1924
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        # A doubled prefix stands for a literal prefix character, so the
        # bare prefix character becomes its own replacement.
        patterns = patterns + '|' + prefix
        prefix_char = prefix[1:] if len(prefix) > 1 else prefix
        mapping[prefix_char] = prefix_char
    matcher = remod.compile(r'%s(%s)' % (prefix, patterns))
    def _substitute(m):
        # strip the one-character prefix from the match to find the key
        return fn(mapping[m.group()[1:]])
    return matcher.sub(_substitute, s)
1949 1949
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, error.Abort is raised.
    """
    # Numeric ports (or numeric strings) bypass the service lookup.
    try:
        return int(port)
    except ValueError:
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
1966 1966
# Recognized boolean spellings; lookups are done on the lowercased input.
_booleans = dict.fromkeys('1 yes true on always'.split(), True)
_booleans.update(dict.fromkeys('0 no false off never'.split(), False))

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower())
1977 1977
1978 1978 _hexdig = '0123456789ABCDEFabcdef'
1979 1979 _hextochr = dict((a + b, chr(int(a + b, 16)))
1980 1980 for a in _hexdig for b in _hexdig)
1981 1981
1982 1982 def _urlunquote(s):
1983 1983 """Decode HTTP/HTML % encoding.
1984 1984
1985 1985 >>> _urlunquote('abc%20def')
1986 1986 'abc def'
1987 1987 """
1988 1988 res = s.split('%')
1989 1989 # fastpath
1990 1990 if len(res) == 1:
1991 1991 return s
1992 1992 s = res[0]
1993 1993 for item in res[1:]:
1994 1994 try:
1995 1995 s += _hextochr[item[:2]] + item[2:]
1996 1996 except KeyError:
1997 1997 s += '%' + item
1998 1998 except UnicodeDecodeError:
1999 1999 s += unichr(int(item[:2], 16)) + item[2:]
2000 2000 return s
2001 2001
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url('http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url('ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url('file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url('file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url('bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url('bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(r'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(r'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(r'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
    >>> url(r'file:///C:\users\me')
    <url scheme: 'file', path: 'C:\\users\\me'>

    Authentication credentials:

    >>> url('ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url('ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url('http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>
    """

    # characters left unescaped when components are re-serialized
    _safechars = "!~*'()+"
    # path components additionally allow '/', ':' and '\'
    _safepchars = "/!~*'()+:\\"
    _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)
            if not path:
                path = None

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        # With no scheme, the whole input is a local path and parsing stops.
        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        if self.host and '@' in self.host:
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))

    def __repr__(self):
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
        'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c:/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        >>> print url(r'file:///D:\data\hg')
        file:///D:\data\hg
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urllib.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urllib.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            # bracketed IPv6 literals are emitted verbatim
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urllib.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urllib.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urllib.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
        return s

    def authinfo(self):
        """Return (url_without_credentials, urllib2_authinfo_or_None)."""
        user, passwd = self.user, self.passwd
        try:
            # temporarily strip the credentials to serialize a clean URL
            self.user, self.passwd = None, None
            s = str(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        """Report whether this URL is absolute, i.e. cannot be joined."""
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        """Return a filesystem path for file:/bundle: URLs, otherwise the
        original unparsed string."""
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

    def islocal(self):
        '''whether localpath will return something that posixfile can open'''
        return (not self.scheme or self.scheme == 'file'
                or self.scheme == 'bundle')
2288 2288
def hasscheme(path):
    """Tell whether *path* parses with an explicit URL scheme."""
    parsed = url(path)
    return parsed.scheme is not None and parsed.scheme != ''
2291 2291
def hasdriveletter(path):
    """Check for a Windows drive-letter prefix such as 'c:'.

    Falsy inputs ('' or None) are returned as-is, preserving the original
    truthiness-based contract.
    """
    if not path:
        return path
    return path[0:1].isalpha() and path[1:2] == ':'
2294 2294
def urllocalpath(path):
    """Return the local filesystem path for *path*, parsed with query
    strings and fragments treated as part of the path."""
    parsed = url(path, parsequery=False, parsefragment=False)
    return parsed.localpath()
2297 2297
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        # mask rather than drop, so the URL shape stays recognizable
        parsed.passwd = '***'
    return str(parsed)
2304 2304
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = None
    parsed.passwd = None
    return str(parsed)
2310 2310
def isatty(fp):
    """Return True if the file-like object *fp* is attached to a tty.

    Objects without an isatty() method (plain buffers, None, ...) are
    treated as non-ttys instead of raising AttributeError.
    """
    # Defect fixed: the review-page diff left BOTH the pre-rename (fd) and
    # post-rename (fp) lines in place, duplicating the def and return
    # statements; collapsed to the renamed version.
    try:
        return fp.isatty()
    except AttributeError:
        return False
2316 2316
# Human-readable formatter for elapsed seconds, built by unitcountfn()
# (defined earlier in this file) from (factor, divisor, format) rows --
# presumably the first row whose scaled value fits is used; confirm
# against unitcountfn.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )

# Current indentation (in spaces) of nested @timed reports; kept in a
# one-element list so the wrapper closures can mutate it in place.
_timenesting = [0]
2334 2334
def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        indent = 2
        begin = time.time()
        # bump the shared nesting level so nested @timed calls indent
        _timenesting[0] += indent
        try:
            return func(*args, **kwargs)
        finally:
            _timenesting[0] -= indent
            sys.stderr.write('%s%s: %s\n' %
                             (' ' * _timenesting[0], func.__name__,
                              timecount(time.time() - begin)))
    return wrapper
2359 2359
# Suffix table for sizetoint(); probed in order, so the multi-letter
# suffixes and the bare 'b' fallback come after the single letters.
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                magnitude = float(spec[:-len(suffix)])
                return int(magnitude * multiplier)
        # no recognized suffix: a plain byte count
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2381 2381
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behavior. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source, hook) pairs, sorted lazily at call time
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort by source name so the call order is deterministic
        self._hooks.sort(key=lambda pair: pair[0])
        return [fn(*args) for _source, fn in self._hooks]
2399 2399
def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' last entries. By default it will flush stdout first.
    It can be used everywhere and do intentionally not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so output interleaves sensibly
        otherf.flush()
    f.write('%s at:\n' % msg)
    frames = traceback.extract_stack()[:-skip - 1]
    entries = [('%s:%s' % (fname, lineno), funcname)
               for fname, lineno, funcname, _text in frames]
    if entries:
        # pad locations so the 'in <func>' column lines up
        width = max(len(loc) for loc, _func in entries)
        for loc, funcname in entries:
            f.write(' %-*s in %s\n' % (width, loc, funcname))
    f.flush()
2416 2416
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # directory name -> number of additions that first landed on it
        self._dirs = {}
        if skip is not None and safehasattr(map, 'iteritems'):
            # dirstate-style mapping: honour the skip state
            for f, s in map.iteritems():
                if s[0] != skip:
                    self.addpath(f)
        else:
            for f in map:
                self.addpath(f)

    def addpath(self, path):
        counts = self._dirs
        # Walk ancestors deepest-first; the first one already recorded
        # absorbs the increment (its own ancestors were counted when it
        # was first added), new ancestors start at 1.
        for ancestor in finddirs(path):
            if ancestor in counts:
                counts[ancestor] += 1
                return
            counts[ancestor] = 1

    def delpath(self, path):
        counts = self._dirs
        # Mirror of addpath: decrement, dropping entries that hit zero.
        for ancestor in finddirs(path):
            if counts[ancestor] > 1:
                counts[ancestor] -= 1
                return
            del counts[ancestor]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
2452 2452
# Prefer the C implementation of 'dirs' when the parsers extension module
# provides one; it shadows the pure-Python class above.
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
2455 2455
def finddirs(path):
    """Yield each ancestor directory of *path*, deepest first."""
    sep = path.rfind('/')
    while sep != -1:
        yield path[:sep]
        sep = path.rfind('/', 0, sep)
2461 2461
2462 2462 # compression utility
2463 2463
class nocompress(object):
    """Identity 'compressor': hands every chunk back unchanged."""
    def compress(self, x):
        return x

    def flush(self):
        # nothing is ever buffered, so there is nothing left to emit
        return ""
2469 2469
# Compressor factories keyed by type name; None (and its legacy 'UN'
# alias below) means no compression.
compressors = {
    None: nocompress,
    # lambda to prevent early import
    'BZ': lambda: bz2.BZ2Compressor(),
    'GZ': lambda: zlib.compressobj(),
    }
# also support the old form by courtesies
compressors['UN'] = compressors[None]
2478 2478
def _makedecompressor(decompcls):
    """Build a function that wraps a file-like object in a chunkbuffer of
    data decompressed by instances of *decompcls*."""
    def _stream(fh):
        engine = decompcls()
        for block in filechunkiter(fh):
            yield engine.decompress(block)
    def func(fh):
        return chunkbuffer(_stream(fh))
    return func
2487 2487
def _bz2():
    # Build a bz2 decompressor for streams whose leading 'BZ' magic bytes
    # were stripped off (see the '_truncatedBZ' decompressors entry below).
    d = bz2.BZ2Decompressor()
    # Bzip2 stream start with BZ, but we stripped it.
    # we put it back for good measure.
    d.decompress('BZ')
    return d
2494 2494
# Decompressor factories matching the compressors table above; each value
# takes a file-like object and returns a readable stream of decompressed
# data. '_truncatedBZ' handles bz2 streams whose 'BZ' magic was removed.
decompressors = {None: lambda fh: fh,
                 '_truncatedBZ': _makedecompressor(_bz2),
                 'BZ': _makedecompressor(lambda: bz2.BZ2Decompressor()),
                 'GZ': _makedecompressor(lambda: zlib.decompressobj()),
                 }
# also support the old form by courtesies
decompressors['UN'] = decompressors[None]
2502 2502
# convenient shortcut
dst = debugstacktrace  # short alias for sprinkling into code while debugging
General Comments 0
You need to be logged in to leave comments. Login now