util: add a 'nogc' decorator to disable the garbage collection...
Pierre-Yves David
r23495:b25f07cb default
@@ -1,2191 +1,2214 @@
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 import i18n
17 17 _ = i18n._
18 18 import error, osutil, encoding
19 19 import errno, shutil, sys, tempfile, traceback
20 20 import re as remod
21 21 import os, time, datetime, calendar, textwrap, signal, collections
22 22 import imp, socket, urllib
23 import gc
23 24
24 25 if os.name == 'nt':
25 26 import windows as platform
26 27 else:
27 28 import posix as platform
28 29
29 30 cachestat = platform.cachestat
30 31 checkexec = platform.checkexec
31 32 checklink = platform.checklink
32 33 copymode = platform.copymode
33 34 executablepath = platform.executablepath
34 35 expandglobs = platform.expandglobs
35 36 explainexit = platform.explainexit
36 37 findexe = platform.findexe
37 38 gethgcmd = platform.gethgcmd
38 39 getuser = platform.getuser
39 40 groupmembers = platform.groupmembers
40 41 groupname = platform.groupname
41 42 hidewindow = platform.hidewindow
42 43 isexec = platform.isexec
43 44 isowner = platform.isowner
44 45 localpath = platform.localpath
45 46 lookupreg = platform.lookupreg
46 47 makedir = platform.makedir
47 48 nlinks = platform.nlinks
48 49 normpath = platform.normpath
49 50 normcase = platform.normcase
50 51 openhardlinks = platform.openhardlinks
51 52 oslink = platform.oslink
52 53 parsepatchoutput = platform.parsepatchoutput
53 54 pconvert = platform.pconvert
54 55 popen = platform.popen
55 56 posixfile = platform.posixfile
56 57 quotecommand = platform.quotecommand
57 58 readpipe = platform.readpipe
58 59 rename = platform.rename
59 60 samedevice = platform.samedevice
60 61 samefile = platform.samefile
61 62 samestat = platform.samestat
62 63 setbinary = platform.setbinary
63 64 setflags = platform.setflags
64 65 setsignalhandler = platform.setsignalhandler
65 66 shellquote = platform.shellquote
66 67 spawndetached = platform.spawndetached
67 68 split = platform.split
68 69 sshargs = platform.sshargs
69 70 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
70 71 statisexec = platform.statisexec
71 72 statislink = platform.statislink
72 73 termwidth = platform.termwidth
73 74 testpid = platform.testpid
74 75 umask = platform.umask
75 76 unlink = platform.unlink
76 77 unlinkpath = platform.unlinkpath
77 78 username = platform.username
78 79
79 80 # Python compatibility
80 81
81 82 _notset = object()
82 83
83 84 def safehasattr(thing, attr):
84 85 return getattr(thing, attr, _notset) is not _notset
85 86
86 87 def sha1(s=''):
87 88 '''
88 89 Low-overhead wrapper around Python's SHA support
89 90
90 91 >>> f = _fastsha1
91 92 >>> a = sha1()
92 93 >>> a = f()
93 94 >>> a.hexdigest()
94 95 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
95 96 '''
96 97
97 98 return _fastsha1(s)
98 99
99 100 def _fastsha1(s=''):
100 101 # This function will import sha1 from hashlib or sha (whichever is
101 102 # available) and overwrite itself with it on the first call.
102 103 # Subsequent calls will go directly to the imported function.
103 104 if sys.version_info >= (2, 5):
104 105 from hashlib import sha1 as _sha1
105 106 else:
106 107 from sha import sha as _sha1
107 108 global _fastsha1, sha1
108 109 _fastsha1 = sha1 = _sha1
109 110 return _sha1(s)
110 111
111 112 def md5(s=''):
112 113 try:
113 114 from hashlib import md5 as _md5
114 115 except ImportError:
115 116 from md5 import md5 as _md5
116 117 global md5
117 118 md5 = _md5
118 119 return _md5(s)
119 120
120 121 DIGESTS = {
121 122 'md5': md5,
122 123 'sha1': sha1,
123 124 }
124 125 # List of digest types from strongest to weakest
125 126 DIGESTS_BY_STRENGTH = ['sha1', 'md5']
126 127
127 128 try:
128 129 import hashlib
129 130 DIGESTS.update({
130 131 'sha512': hashlib.sha512,
131 132 })
132 133 DIGESTS_BY_STRENGTH.insert(0, 'sha512')
133 134 except ImportError:
134 135 pass
135 136
136 137 for k in DIGESTS_BY_STRENGTH:
137 138 assert k in DIGESTS
138 139
139 140 class digester(object):
140 141 """helper to compute digests.
141 142
142 143 This helper can be used to compute one or more digests given their name.
143 144
144 145 >>> d = digester(['md5', 'sha1'])
145 146 >>> d.update('foo')
146 147 >>> [k for k in sorted(d)]
147 148 ['md5', 'sha1']
148 149 >>> d['md5']
149 150 'acbd18db4cc2f85cedef654fccc4a4d8'
150 151 >>> d['sha1']
151 152 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
152 153 >>> digester.preferred(['md5', 'sha1'])
153 154 'sha1'
154 155 """
155 156
156 157 def __init__(self, digests, s=''):
157 158 self._hashes = {}
158 159 for k in digests:
159 160 if k not in DIGESTS:
160 161 raise Abort(_('unknown digest type: %s') % k)
161 162 self._hashes[k] = DIGESTS[k]()
162 163 if s:
163 164 self.update(s)
164 165
165 166 def update(self, data):
166 167 for h in self._hashes.values():
167 168 h.update(data)
168 169
169 170 def __getitem__(self, key):
170 171 if key not in DIGESTS:
171 172 raise Abort(_('unknown digest type: %s') % key)
172 173 return self._hashes[key].hexdigest()
173 174
174 175 def __iter__(self):
175 176 return iter(self._hashes)
176 177
177 178 @staticmethod
178 179 def preferred(supported):
179 180 """returns the strongest digest type in both supported and DIGESTS."""
180 181
181 182 for k in DIGESTS_BY_STRENGTH:
182 183 if k in supported:
183 184 return k
184 185 return None
185 186
186 187 class digestchecker(object):
187 188 """file handle wrapper that additionally checks content against a given
188 189 size and digests.
189 190
190 191 d = digestchecker(fh, size, {'md5': '...'})
191 192
192 193 When multiple digests are given, all of them are validated.
193 194 """
194 195
195 196 def __init__(self, fh, size, digests):
196 197 self._fh = fh
197 198 self._size = size
198 199 self._got = 0
199 200 self._digests = dict(digests)
200 201 self._digester = digester(self._digests.keys())
201 202
202 203 def read(self, length=-1):
203 204 content = self._fh.read(length)
204 205 self._digester.update(content)
205 206 self._got += len(content)
206 207 return content
207 208
208 209 def validate(self):
209 210 if self._size != self._got:
210 211 raise Abort(_('size mismatch: expected %d, got %d') %
211 212 (self._size, self._got))
212 213 for k, v in self._digests.items():
213 214 if v != self._digester[k]:
214 215 # i18n: first parameter is a digest name
215 216 raise Abort(_('%s mismatch: expected %s, got %s') %
216 217 (k, v, self._digester[k]))
217 218
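# A minimal usage sketch (editor's addition, not part of util.py): wrap a
# file-like object so that, once it has been fully read, its size and sha1
# can be validated; fh, expectedsize and expectedsha1 are hypothetical inputs.
def _exampledigestcheck(fh, expectedsize, expectedsha1):
    checked = digestchecker(fh, expectedsize, {'sha1': expectedsha1})
    while checked.read(4096):
        pass                 # drain the wrapped file, feeding the digester
    checked.validate()       # raises Abort on size or digest mismatch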
218 219 try:
219 220 buffer = buffer
220 221 except NameError:
221 222 if sys.version_info[0] < 3:
222 223 def buffer(sliceable, offset=0):
223 224 return sliceable[offset:]
224 225 else:
225 226 def buffer(sliceable, offset=0):
226 227 return memoryview(sliceable)[offset:]
227 228
228 229 import subprocess
229 230 closefds = os.name == 'posix'
230 231
231 232 def popen2(cmd, env=None, newlines=False):
232 233 # Setting bufsize to -1 lets the system decide the buffer size.
233 234 # The default for bufsize is 0, meaning unbuffered. This leads to
234 235 # poor performance on Mac OS X: http://bugs.python.org/issue4194
235 236 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
236 237 close_fds=closefds,
237 238 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
238 239 universal_newlines=newlines,
239 240 env=env)
240 241 return p.stdin, p.stdout
241 242
242 243 def popen3(cmd, env=None, newlines=False):
243 244 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
244 245 return stdin, stdout, stderr
245 246
246 247 def popen4(cmd, env=None, newlines=False):
247 248 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
248 249 close_fds=closefds,
249 250 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
250 251 stderr=subprocess.PIPE,
251 252 universal_newlines=newlines,
252 253 env=env)
253 254 return p.stdin, p.stdout, p.stderr, p
254 255
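# A minimal usage sketch (editor's addition, not part of util.py): popen2()
# hands back the child's stdin and stdout pipes; the 'tr' command below is
# an assumption and only available on POSIX-like systems.
def _examplepopen2():
    fin, fout = popen2('tr a-z A-Z')
    fin.write('hello\n')
    fin.close()
    return fout.read()       # 'HELLO\n' where 'tr' exists on the PATH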
255 256 def version():
256 257 """Return version information if available."""
257 258 try:
258 259 import __version__
259 260 return __version__.version
260 261 except ImportError:
261 262 return 'unknown'
262 263
263 264 # used by parsedate
264 265 defaultdateformats = (
265 266 '%Y-%m-%d %H:%M:%S',
266 267 '%Y-%m-%d %I:%M:%S%p',
267 268 '%Y-%m-%d %H:%M',
268 269 '%Y-%m-%d %I:%M%p',
269 270 '%Y-%m-%d',
270 271 '%m-%d',
271 272 '%m/%d',
272 273 '%m/%d/%y',
273 274 '%m/%d/%Y',
274 275 '%a %b %d %H:%M:%S %Y',
275 276 '%a %b %d %I:%M:%S%p %Y',
276 277 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
277 278 '%b %d %H:%M:%S %Y',
278 279 '%b %d %I:%M:%S%p %Y',
279 280 '%b %d %H:%M:%S',
280 281 '%b %d %I:%M:%S%p',
281 282 '%b %d %H:%M',
282 283 '%b %d %I:%M%p',
283 284 '%b %d %Y',
284 285 '%b %d',
285 286 '%H:%M:%S',
286 287 '%I:%M:%S%p',
287 288 '%H:%M',
288 289 '%I:%M%p',
289 290 )
290 291
291 292 extendeddateformats = defaultdateformats + (
292 293 "%Y",
293 294 "%Y-%m",
294 295 "%b",
295 296 "%b %Y",
296 297 )
297 298
298 299 def cachefunc(func):
299 300 '''cache the result of function calls'''
300 301 # XXX doesn't handle keywords args
301 302 if func.func_code.co_argcount == 0:
302 303 cache = []
303 304 def f():
304 305 if len(cache) == 0:
305 306 cache.append(func())
306 307 return cache[0]
307 308 return f
308 309 cache = {}
309 310 if func.func_code.co_argcount == 1:
310 311 # we gain a small amount of time because
311 312 # we don't need to pack/unpack the list
312 313 def f(arg):
313 314 if arg not in cache:
314 315 cache[arg] = func(arg)
315 316 return cache[arg]
316 317 else:
317 318 def f(*args):
318 319 if args not in cache:
319 320 cache[args] = func(*args)
320 321 return cache[args]
321 322
322 323 return f
323 324
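# A minimal usage sketch (editor's addition, not part of util.py): cachefunc
# memoizes on positional arguments, so a second call with the same argument
# is served from the cache instead of re-running the function.
def _examplecachefunc():
    calls = []
    def square(x):
        calls.append(x)
        return x * x
    cached = cachefunc(square)
    cached(3)
    cached(3)
    return len(calls)        # 1: only the first call actually ran square()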
324 325 try:
325 326 collections.deque.remove
326 327 deque = collections.deque
327 328 except AttributeError:
328 329 # python 2.4 lacks deque.remove
329 330 class deque(collections.deque):
330 331 def remove(self, val):
331 332 for i, v in enumerate(self):
332 333 if v == val:
333 334 del self[i]
334 335 break
335 336
336 337 class sortdict(dict):
337 338 '''a simple sorted dictionary'''
338 339 def __init__(self, data=None):
339 340 self._list = []
340 341 if data:
341 342 self.update(data)
342 343 def copy(self):
343 344 return sortdict(self)
344 345 def __setitem__(self, key, val):
345 346 if key in self:
346 347 self._list.remove(key)
347 348 self._list.append(key)
348 349 dict.__setitem__(self, key, val)
349 350 def __iter__(self):
350 351 return self._list.__iter__()
351 352 def update(self, src):
352 353 for k in src:
353 354 self[k] = src[k]
354 355 def clear(self):
355 356 dict.clear(self)
356 357 self._list = []
357 358 def items(self):
358 359 return [(k, self[k]) for k in self._list]
359 360 def __delitem__(self, key):
360 361 dict.__delitem__(self, key)
361 362 self._list.remove(key)
362 363 def pop(self, key, *args, **kwargs):
363 364 value = dict.pop(self, key, *args, **kwargs)
364 365 if key in self._list:
365 366 self._list.remove(key)
366 367 return value
368 369 def keys(self):
369 370 return self._list
370 371 def iterkeys(self):
371 372 return self._list.__iter__()
372 373 def iteritems(self):
373 374 for k in self._list:
374 375 yield k, self[k]
375 376 def insert(self, index, key, val):
376 377 self._list.insert(index, key)
377 378 dict.__setitem__(self, key, val)
378 379
379 380 class lrucachedict(object):
380 381 '''cache most recent gets from or sets to this dictionary'''
381 382 def __init__(self, maxsize):
382 383 self._cache = {}
383 384 self._maxsize = maxsize
384 385 self._order = deque()
385 386
386 387 def __getitem__(self, key):
387 388 value = self._cache[key]
388 389 self._order.remove(key)
389 390 self._order.append(key)
390 391 return value
391 392
392 393 def __setitem__(self, key, value):
393 394 if key not in self._cache:
394 395 if len(self._cache) >= self._maxsize:
395 396 del self._cache[self._order.popleft()]
396 397 else:
397 398 self._order.remove(key)
398 399 self._cache[key] = value
399 400 self._order.append(key)
400 401
401 402 def __contains__(self, key):
402 403 return key in self._cache
403 404
404 405 def clear(self):
405 406 self._cache.clear()
406 407 self._order = deque()
407 408
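# A minimal usage sketch (editor's addition, not part of util.py): with a
# maximum size of 2, touching 'a' makes 'b' the least recently used entry,
# so inserting 'c' evicts 'b'.
def _examplelrucachedict():
    d = lrucachedict(2)
    d['a'] = 1
    d['b'] = 2
    d['a']                   # 'a' becomes the most recently used key
    d['c'] = 3               # evicts 'b', the least recently used entry
    return 'b' in d          # False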
408 409 def lrucachefunc(func):
409 410 '''cache most recent results of function calls'''
410 411 cache = {}
411 412 order = deque()
412 413 if func.func_code.co_argcount == 1:
413 414 def f(arg):
414 415 if arg not in cache:
415 416 if len(cache) > 20:
416 417 del cache[order.popleft()]
417 418 cache[arg] = func(arg)
418 419 else:
419 420 order.remove(arg)
420 421 order.append(arg)
421 422 return cache[arg]
422 423 else:
423 424 def f(*args):
424 425 if args not in cache:
425 426 if len(cache) > 20:
426 427 del cache[order.popleft()]
427 428 cache[args] = func(*args)
428 429 else:
429 430 order.remove(args)
430 431 order.append(args)
431 432 return cache[args]
432 433
433 434 return f
434 435
435 436 class propertycache(object):
436 437 def __init__(self, func):
437 438 self.func = func
438 439 self.name = func.__name__
439 440 def __get__(self, obj, type=None):
440 441 result = self.func(obj)
441 442 self.cachevalue(obj, result)
442 443 return result
443 444
444 445 def cachevalue(self, obj, value):
445 446 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
446 447 obj.__dict__[self.name] = value
447 448
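# A minimal usage sketch (editor's addition, not part of util.py): the
# decorated method runs once per instance; cachevalue() stores the result in
# the instance __dict__, so later lookups never reach the descriptor again.
class _examplecached(object):
    @propertycache
    def answer(self):
        return 6 * 7         # computed only on the first access of obj.answer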
448 449 def pipefilter(s, cmd):
449 450 '''filter string S through command CMD, returning its output'''
450 451 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
451 452 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
452 453 pout, perr = p.communicate(s)
453 454 return pout
454 455
455 456 def tempfilter(s, cmd):
456 457 '''filter string S through a pair of temporary files with CMD.
457 458 CMD is used as a template to create the real command to be run,
458 459 with the strings INFILE and OUTFILE replaced by the real names of
459 460 the temporary files generated.'''
460 461 inname, outname = None, None
461 462 try:
462 463 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
463 464 fp = os.fdopen(infd, 'wb')
464 465 fp.write(s)
465 466 fp.close()
466 467 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
467 468 os.close(outfd)
468 469 cmd = cmd.replace('INFILE', inname)
469 470 cmd = cmd.replace('OUTFILE', outname)
470 471 code = os.system(cmd)
471 472 if sys.platform == 'OpenVMS' and code & 1:
472 473 code = 0
473 474 if code:
474 475 raise Abort(_("command '%s' failed: %s") %
475 476 (cmd, explainexit(code)))
476 477 fp = open(outname, 'rb')
477 478 r = fp.read()
478 479 fp.close()
479 480 return r
480 481 finally:
481 482 try:
482 483 if inname:
483 484 os.unlink(inname)
484 485 except OSError:
485 486 pass
486 487 try:
487 488 if outname:
488 489 os.unlink(outname)
489 490 except OSError:
490 491 pass
491 492
492 493 filtertable = {
493 494 'tempfile:': tempfilter,
494 495 'pipe:': pipefilter,
495 496 }
496 497
497 498 def filter(s, cmd):
498 499 "filter a string through a command that transforms its input to its output"
499 500 for name, fn in filtertable.iteritems():
500 501 if cmd.startswith(name):
501 502 return fn(s, cmd[len(name):].lstrip())
502 503 return pipefilter(s, cmd)
503 504
504 505 def binary(s):
505 506 """return true if a string is binary data"""
506 507 return bool(s and '\0' in s)
507 508
508 509 def increasingchunks(source, min=1024, max=65536):
509 510 '''return no less than min bytes per chunk while data remains,
510 511 doubling min after each chunk until it reaches max'''
511 512 def log2(x):
512 513 if not x:
513 514 return 0
514 515 i = 0
515 516 while x:
516 517 x >>= 1
517 518 i += 1
518 519 return i - 1
519 520
520 521 buf = []
521 522 blen = 0
522 523 for chunk in source:
523 524 buf.append(chunk)
524 525 blen += len(chunk)
525 526 if blen >= min:
526 527 if min < max:
527 528 min = min << 1
528 529 nmin = 1 << log2(blen)
529 530 if nmin > min:
530 531 min = nmin
531 532 if min > max:
532 533 min = max
533 534 yield ''.join(buf)
534 535 blen = 0
535 536 buf = []
536 537 if buf:
537 538 yield ''.join(buf)
538 539
539 540 Abort = error.Abort
540 541
541 542 def always(fn):
542 543 return True
543 544
544 545 def never(fn):
545 546 return False
546 547
548 def nogc(func):
549 """disable garbage collector
550
551 Python's garbage collector triggers a GC each time a certain number of
552 container objects (the number being defined by gc.get_threshold()) are
553 allocated even when marked not to be tracked by the collector. Tracking has
554 no effect on when GCs are triggered, only on what objects the GC looks
555 into. As a workaround, disable GC while building complex (huge)
556 containers.
557
558 This garbage collector issue has been fixed in Python 2.7.
559 """
560 def wrapper(*args, **kwargs):
561 gcenabled = gc.isenabled()
562 gc.disable()
563 try:
564 return func(*args, **kwargs)
565 finally:
566 if gcenabled:
567 gc.enable()
568 return wrapper
569
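# A minimal usage sketch (editor's addition, not part of this changeset):
# nogc is meant to wrap code that allocates many container objects in a
# tight loop; the function below is hypothetical.
@nogc
def _examplebuildmap(n):
    # while this runs, cyclic GC passes are suppressed, then re-enabled
    # (if they were enabled before) by the decorator's finally clause
    return dict((i, (i, str(i))) for i in xrange(n))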
547 570 def pathto(root, n1, n2):
548 571 '''return the relative path from one place to another.
549 572 root should use os.sep to separate directories
550 573 n1 should use os.sep to separate directories
551 574 n2 should use "/" to separate directories
552 575 returns an os.sep-separated path.
553 576
554 577 If n1 is a relative path, it's assumed it's
555 578 relative to root.
556 579 n2 should always be relative to root.
557 580 '''
558 581 if not n1:
559 582 return localpath(n2)
560 583 if os.path.isabs(n1):
561 584 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
562 585 return os.path.join(root, localpath(n2))
563 586 n2 = '/'.join((pconvert(root), n2))
564 587 a, b = splitpath(n1), n2.split('/')
565 588 a.reverse()
566 589 b.reverse()
567 590 while a and b and a[-1] == b[-1]:
568 591 a.pop()
569 592 b.pop()
570 593 b.reverse()
571 594 return os.sep.join((['..'] * len(a)) + b) or '.'
572 595
573 596 def mainfrozen():
574 597 """return True if we are a frozen executable.
575 598
576 599 The code supports py2exe (most common, Windows only) and tools/freeze
577 600 (portable, not much used).
578 601 """
579 602 return (safehasattr(sys, "frozen") or # new py2exe
580 603 safehasattr(sys, "importers") or # old py2exe
581 604 imp.is_frozen("__main__")) # tools/freeze
582 605
583 606 # the location of data files matching the source code
584 607 if mainfrozen():
585 608 # executable version (py2exe) doesn't support __file__
586 609 datapath = os.path.dirname(sys.executable)
587 610 else:
588 611 datapath = os.path.dirname(__file__)
589 612
590 613 i18n.setdatapath(datapath)
591 614
592 615 _hgexecutable = None
593 616
594 617 def hgexecutable():
595 618 """return location of the 'hg' executable.
596 619
597 620 Defaults to $HG or 'hg' in the search path.
598 621 """
599 622 if _hgexecutable is None:
600 623 hg = os.environ.get('HG')
601 624 mainmod = sys.modules['__main__']
602 625 if hg:
603 626 _sethgexecutable(hg)
604 627 elif mainfrozen():
605 628 _sethgexecutable(sys.executable)
606 629 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
607 630 _sethgexecutable(mainmod.__file__)
608 631 else:
609 632 exe = findexe('hg') or os.path.basename(sys.argv[0])
610 633 _sethgexecutable(exe)
611 634 return _hgexecutable
612 635
613 636 def _sethgexecutable(path):
614 637 """set location of the 'hg' executable"""
615 638 global _hgexecutable
616 639 _hgexecutable = path
617 640
618 641 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
619 642 '''enhanced shell command execution.
620 643 run with environment maybe modified, maybe in different dir.
621 644
622 645 if command fails and onerr is None, return status, else raise onerr
623 646 object as exception.
624 647
625 648 if out is specified, it is assumed to be a file-like object that has a
626 649 write() method. stdout and stderr will be redirected to out.'''
627 650 try:
628 651 sys.stdout.flush()
629 652 except Exception:
630 653 pass
631 654 def py2shell(val):
632 655 'convert python object into string that is useful to shell'
633 656 if val is None or val is False:
634 657 return '0'
635 658 if val is True:
636 659 return '1'
637 660 return str(val)
638 661 origcmd = cmd
639 662 cmd = quotecommand(cmd)
640 663 if sys.platform == 'plan9' and (sys.version_info[0] == 2
641 664 and sys.version_info[1] < 7):
642 665 # subprocess kludge to work around issues in half-baked Python
643 666 # ports, notably bichued/python:
644 667 if not cwd is None:
645 668 os.chdir(cwd)
646 669 rc = os.system(cmd)
647 670 else:
648 671 env = dict(os.environ)
649 672 env.update((k, py2shell(v)) for k, v in environ.iteritems())
650 673 env['HG'] = hgexecutable()
651 674 if out is None or out == sys.__stdout__:
652 675 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
653 676 env=env, cwd=cwd)
654 677 else:
655 678 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
656 679 env=env, cwd=cwd, stdout=subprocess.PIPE,
657 680 stderr=subprocess.STDOUT)
658 681 while True:
659 682 line = proc.stdout.readline()
660 683 if not line:
661 684 break
662 685 out.write(line)
663 686 proc.wait()
664 687 rc = proc.returncode
665 688 if sys.platform == 'OpenVMS' and rc & 1:
666 689 rc = 0
667 690 if rc and onerr:
668 691 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
669 692 explainexit(rc)[0])
670 693 if errprefix:
671 694 errmsg = '%s: %s' % (errprefix, errmsg)
672 695 raise onerr(errmsg)
673 696 return rc
674 697
675 698 def checksignature(func):
676 699 '''wrap a function with code to check for calling errors'''
677 700 def check(*args, **kwargs):
678 701 try:
679 702 return func(*args, **kwargs)
680 703 except TypeError:
681 704 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
682 705 raise error.SignatureError
683 706 raise
684 707
685 708 return check
686 709
687 710 def copyfile(src, dest):
688 711 "copy a file, preserving mode and atime/mtime"
689 712 if os.path.lexists(dest):
690 713 unlink(dest)
691 714 if os.path.islink(src):
692 715 os.symlink(os.readlink(src), dest)
693 716 else:
694 717 try:
695 718 shutil.copyfile(src, dest)
696 719 shutil.copymode(src, dest)
697 720 except shutil.Error, inst:
698 721 raise Abort(str(inst))
699 722
700 723 def copyfiles(src, dst, hardlink=None):
701 724 """Copy a directory tree using hardlinks if possible"""
702 725
703 726 if hardlink is None:
704 727 hardlink = (os.stat(src).st_dev ==
705 728 os.stat(os.path.dirname(dst)).st_dev)
706 729
707 730 num = 0
708 731 if os.path.isdir(src):
709 732 os.mkdir(dst)
710 733 for name, kind in osutil.listdir(src):
711 734 srcname = os.path.join(src, name)
712 735 dstname = os.path.join(dst, name)
713 736 hardlink, n = copyfiles(srcname, dstname, hardlink)
714 737 num += n
715 738 else:
716 739 if hardlink:
717 740 try:
718 741 oslink(src, dst)
719 742 except (IOError, OSError):
720 743 hardlink = False
721 744 shutil.copy(src, dst)
722 745 else:
723 746 shutil.copy(src, dst)
724 747 num += 1
725 748
726 749 return hardlink, num
727 750
728 751 _winreservednames = '''con prn aux nul
729 752 com1 com2 com3 com4 com5 com6 com7 com8 com9
730 753 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
731 754 _winreservedchars = ':*?"<>|'
732 755 def checkwinfilename(path):
733 756 r'''Check that the base-relative path is a valid filename on Windows.
734 757 Returns None if the path is ok, or a UI string describing the problem.
735 758
736 759 >>> checkwinfilename("just/a/normal/path")
737 760 >>> checkwinfilename("foo/bar/con.xml")
738 761 "filename contains 'con', which is reserved on Windows"
739 762 >>> checkwinfilename("foo/con.xml/bar")
740 763 "filename contains 'con', which is reserved on Windows"
741 764 >>> checkwinfilename("foo/bar/xml.con")
742 765 >>> checkwinfilename("foo/bar/AUX/bla.txt")
743 766 "filename contains 'AUX', which is reserved on Windows"
744 767 >>> checkwinfilename("foo/bar/bla:.txt")
745 768 "filename contains ':', which is reserved on Windows"
746 769 >>> checkwinfilename("foo/bar/b\07la.txt")
747 770 "filename contains '\\x07', which is invalid on Windows"
748 771 >>> checkwinfilename("foo/bar/bla ")
749 772 "filename ends with ' ', which is not allowed on Windows"
750 773 >>> checkwinfilename("../bar")
751 774 >>> checkwinfilename("foo\\")
752 775 "filename ends with '\\', which is invalid on Windows"
753 776 >>> checkwinfilename("foo\\/bar")
754 777 "directory name ends with '\\', which is invalid on Windows"
755 778 '''
756 779 if path.endswith('\\'):
757 780 return _("filename ends with '\\', which is invalid on Windows")
758 781 if '\\/' in path:
759 782 return _("directory name ends with '\\', which is invalid on Windows")
760 783 for n in path.replace('\\', '/').split('/'):
761 784 if not n:
762 785 continue
763 786 for c in n:
764 787 if c in _winreservedchars:
765 788 return _("filename contains '%s', which is reserved "
766 789 "on Windows") % c
767 790 if ord(c) <= 31:
768 791 return _("filename contains %r, which is invalid "
769 792 "on Windows") % c
770 793 base = n.split('.')[0]
771 794 if base and base.lower() in _winreservednames:
772 795 return _("filename contains '%s', which is reserved "
773 796 "on Windows") % base
774 797 t = n[-1]
775 798 if t in '. ' and n not in '..':
776 799 return _("filename ends with '%s', which is not allowed "
777 800 "on Windows") % t
778 801
779 802 if os.name == 'nt':
780 803 checkosfilename = checkwinfilename
781 804 else:
782 805 checkosfilename = platform.checkosfilename
783 806
784 807 def makelock(info, pathname):
785 808 try:
786 809 return os.symlink(info, pathname)
787 810 except OSError, why:
788 811 if why.errno == errno.EEXIST:
789 812 raise
790 813 except AttributeError: # no symlink in os
791 814 pass
792 815
793 816 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
794 817 os.write(ld, info)
795 818 os.close(ld)
796 819
797 820 def readlock(pathname):
798 821 try:
799 822 return os.readlink(pathname)
800 823 except OSError, why:
801 824 if why.errno not in (errno.EINVAL, errno.ENOSYS):
802 825 raise
803 826 except AttributeError: # no symlink in os
804 827 pass
805 828 fp = posixfile(pathname)
806 829 r = fp.read()
807 830 fp.close()
808 831 return r
809 832
810 833 def fstat(fp):
811 834 '''stat file object that may not have fileno method.'''
812 835 try:
813 836 return os.fstat(fp.fileno())
814 837 except AttributeError:
815 838 return os.stat(fp.name)
816 839
817 840 # File system features
818 841
819 842 def checkcase(path):
820 843 """
821 844 Return true if the given path is on a case-sensitive filesystem
822 845
823 846 Requires a path (like /foo/.hg) ending with a foldable final
824 847 directory component.
825 848 """
826 849 s1 = os.stat(path)
827 850 d, b = os.path.split(path)
828 851 b2 = b.upper()
829 852 if b == b2:
830 853 b2 = b.lower()
831 854 if b == b2:
832 855 return True # no evidence against case sensitivity
833 856 p2 = os.path.join(d, b2)
834 857 try:
835 858 s2 = os.stat(p2)
836 859 if s2 == s1:
837 860 return False
838 861 return True
839 862 except OSError:
840 863 return True
841 864
842 865 try:
843 866 import re2
844 867 _re2 = None
845 868 except ImportError:
846 869 _re2 = False
847 870
848 871 class _re(object):
849 872 def _checkre2(self):
850 873 global _re2
851 874 try:
852 875 # check if match works, see issue3964
853 876 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
854 877 except ImportError:
855 878 _re2 = False
856 879
857 880 def compile(self, pat, flags=0):
858 881 '''Compile a regular expression, using re2 if possible
859 882
860 883 For best performance, use only re2-compatible regexp features. The
861 884 only flags from the re module that are re2-compatible are
862 885 IGNORECASE and MULTILINE.'''
863 886 if _re2 is None:
864 887 self._checkre2()
865 888 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
866 889 if flags & remod.IGNORECASE:
867 890 pat = '(?i)' + pat
868 891 if flags & remod.MULTILINE:
869 892 pat = '(?m)' + pat
870 893 try:
871 894 return re2.compile(pat)
872 895 except re2.error:
873 896 pass
874 897 return remod.compile(pat, flags)
875 898
876 899 @propertycache
877 900 def escape(self):
878 901 '''Return the version of escape corresponding to self.compile.
879 902
880 903 This is imperfect because whether re2 or re is used for a particular
881 904 function depends on the flags, etc, but it's the best we can do.
882 905 '''
883 906 global _re2
884 907 if _re2 is None:
885 908 self._checkre2()
886 909 if _re2:
887 910 return re2.escape
888 911 else:
889 912 return remod.escape
890 913
891 914 re = _re()
892 915
893 916 _fspathcache = {}
894 917 def fspath(name, root):
895 918 '''Get name in the case stored in the filesystem
896 919
897 920 The name should be relative to root, and be normcase-ed for efficiency.
898 921
899 922 Note that this function is unnecessary, and should not be
900 923 called, for case-sensitive filesystems (simply because it's expensive).
901 924
902 925 The root should be normcase-ed, too.
903 926 '''
904 927 def _makefspathcacheentry(dir):
905 928 return dict((normcase(n), n) for n in os.listdir(dir))
906 929
907 930 seps = os.sep
908 931 if os.altsep:
909 932 seps = seps + os.altsep
910 933 # Protect backslashes. This gets silly very quickly.
911 934 seps = seps.replace('\\','\\\\')
912 935 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
913 936 dir = os.path.normpath(root)
914 937 result = []
915 938 for part, sep in pattern.findall(name):
916 939 if sep:
917 940 result.append(sep)
918 941 continue
919 942
920 943 if dir not in _fspathcache:
921 944 _fspathcache[dir] = _makefspathcacheentry(dir)
922 945 contents = _fspathcache[dir]
923 946
924 947 found = contents.get(part)
925 948 if not found:
926 949 # retry "once per directory" per "dirstate.walk" which
927 950 # may take place for each patch of "hg qpush", for example
928 951 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
929 952 found = contents.get(part)
930 953
931 954 result.append(found or part)
932 955 dir = os.path.join(dir, part)
933 956
934 957 return ''.join(result)
935 958
936 959 def checknlink(testfile):
937 960 '''check whether hardlink count reporting works properly'''
938 961
939 962 # testfile may be open, so we need a separate file for checking to
940 963 # work around issue2543 (or testfile may get lost on Samba shares)
941 964 f1 = testfile + ".hgtmp1"
942 965 if os.path.lexists(f1):
943 966 return False
944 967 try:
945 968 posixfile(f1, 'w').close()
946 969 except IOError:
947 970 return False
948 971
949 972 f2 = testfile + ".hgtmp2"
950 973 fd = None
951 974 try:
952 975 try:
953 976 oslink(f1, f2)
954 977 except OSError:
955 978 return False
956 979
957 980 # nlinks() may behave differently for files on Windows shares if
958 981 # the file is open.
959 982 fd = posixfile(f2)
960 983 return nlinks(f2) > 1
961 984 finally:
962 985 if fd is not None:
963 986 fd.close()
964 987 for f in (f1, f2):
965 988 try:
966 989 os.unlink(f)
967 990 except OSError:
968 991 pass
969 992
970 993 def endswithsep(path):
971 994 '''Check path ends with os.sep or os.altsep.'''
972 995 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
973 996
974 997 def splitpath(path):
975 998 '''Split path by os.sep.
976 999 Note that this function does not use os.altsep because this is
977 1000 an alternative of simple "xxx.split(os.sep)".
978 1001 It is recommended to use os.path.normpath() before using this
979 1002 function if needed.'''
980 1003 return path.split(os.sep)
981 1004
982 1005 def gui():
983 1006 '''Are we running in a GUI?'''
984 1007 if sys.platform == 'darwin':
985 1008 if 'SSH_CONNECTION' in os.environ:
986 1009 # handle SSH access to a box where the user is logged in
987 1010 return False
988 1011 elif getattr(osutil, 'isgui', None):
989 1012 # check if a CoreGraphics session is available
990 1013 return osutil.isgui()
991 1014 else:
992 1015 # pure build; use a safe default
993 1016 return True
994 1017 else:
995 1018 return os.name == "nt" or os.environ.get("DISPLAY")
996 1019
997 1020 def mktempcopy(name, emptyok=False, createmode=None):
998 1021 """Create a temporary file with the same contents from name
999 1022
1000 1023 The permission bits are copied from the original file.
1001 1024
1002 1025 If the temporary file is going to be truncated immediately, you
1003 1026 can use emptyok=True as an optimization.
1004 1027
1005 1028 Returns the name of the temporary file.
1006 1029 """
1007 1030 d, fn = os.path.split(name)
1008 1031 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1009 1032 os.close(fd)
1010 1033 # Temporary files are created with mode 0600, which is usually not
1011 1034 # what we want. If the original file already exists, just copy
1012 1035 # its mode. Otherwise, manually obey umask.
1013 1036 copymode(name, temp, createmode)
1014 1037 if emptyok:
1015 1038 return temp
1016 1039 try:
1017 1040 try:
1018 1041 ifp = posixfile(name, "rb")
1019 1042 except IOError, inst:
1020 1043 if inst.errno == errno.ENOENT:
1021 1044 return temp
1022 1045 if not getattr(inst, 'filename', None):
1023 1046 inst.filename = name
1024 1047 raise
1025 1048 ofp = posixfile(temp, "wb")
1026 1049 for chunk in filechunkiter(ifp):
1027 1050 ofp.write(chunk)
1028 1051 ifp.close()
1029 1052 ofp.close()
1030 1053 except: # re-raises
1031 1054 try: os.unlink(temp)
1032 1055 except OSError: pass
1033 1056 raise
1034 1057 return temp
1035 1058
1036 1059 class atomictempfile(object):
1037 1060 '''writable file object that atomically updates a file
1038 1061
1039 1062 All writes will go to a temporary copy of the original file. Call
1040 1063 close() when you are done writing, and atomictempfile will rename
1041 1064 the temporary copy to the original name, making the changes
1042 1065 visible. If the object is destroyed without being closed, all your
1043 1066 writes are discarded.
1044 1067 '''
1045 1068 def __init__(self, name, mode='w+b', createmode=None):
1046 1069 self.__name = name # permanent name
1047 1070 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1048 1071 createmode=createmode)
1049 1072 self._fp = posixfile(self._tempname, mode)
1050 1073
1051 1074 # delegated methods
1052 1075 self.write = self._fp.write
1053 1076 self.seek = self._fp.seek
1054 1077 self.tell = self._fp.tell
1055 1078 self.fileno = self._fp.fileno
1056 1079
1057 1080 def close(self):
1058 1081 if not self._fp.closed:
1059 1082 self._fp.close()
1060 1083 rename(self._tempname, localpath(self.__name))
1061 1084
1062 1085 def discard(self):
1063 1086 if not self._fp.closed:
1064 1087 try:
1065 1088 os.unlink(self._tempname)
1066 1089 except OSError:
1067 1090 pass
1068 1091 self._fp.close()
1069 1092
1070 1093 def __del__(self):
1071 1094 if safehasattr(self, '_fp'): # constructor actually did something
1072 1095 self.discard()
1073 1096
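# A minimal usage sketch (editor's addition, not part of util.py): write to
# a temporary copy and only rename it over the real file on close(), so
# readers never observe a half-written 'somefile' (a hypothetical path).
def _examplewriteatomic(data):
    f = atomictempfile('somefile', 'wb')
    try:
        f.write(data)
        f.close()            # renames the temporary file into place
    except: # re-raises
        f.discard()          # drops the temporary file, original untouched
        raise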
1074 1097 def makedirs(name, mode=None, notindexed=False):
1075 1098 """recursive directory creation with parent mode inheritance"""
1076 1099 try:
1077 1100 makedir(name, notindexed)
1078 1101 except OSError, err:
1079 1102 if err.errno == errno.EEXIST:
1080 1103 return
1081 1104 if err.errno != errno.ENOENT or not name:
1082 1105 raise
1083 1106 parent = os.path.dirname(os.path.abspath(name))
1084 1107 if parent == name:
1085 1108 raise
1086 1109 makedirs(parent, mode, notindexed)
1087 1110 makedir(name, notindexed)
1088 1111 if mode is not None:
1089 1112 os.chmod(name, mode)
1090 1113
1091 1114 def ensuredirs(name, mode=None, notindexed=False):
1092 1115 """race-safe recursive directory creation
1093 1116
1094 1117 Newly created directories are marked as "not to be indexed by
1095 1118 the content indexing service", if ``notindexed`` is specified
1096 1119 for "write" mode access.
1097 1120 """
1098 1121 if os.path.isdir(name):
1099 1122 return
1100 1123 parent = os.path.dirname(os.path.abspath(name))
1101 1124 if parent != name:
1102 1125 ensuredirs(parent, mode, notindexed)
1103 1126 try:
1104 1127 makedir(name, notindexed)
1105 1128 except OSError, err:
1106 1129 if err.errno == errno.EEXIST and os.path.isdir(name):
1107 1130 # someone else seems to have won a directory creation race
1108 1131 return
1109 1132 raise
1110 1133 if mode is not None:
1111 1134 os.chmod(name, mode)
1112 1135
1113 1136 def readfile(path):
1114 1137 fp = open(path, 'rb')
1115 1138 try:
1116 1139 return fp.read()
1117 1140 finally:
1118 1141 fp.close()
1119 1142
1120 1143 def writefile(path, text):
1121 1144 fp = open(path, 'wb')
1122 1145 try:
1123 1146 fp.write(text)
1124 1147 finally:
1125 1148 fp.close()
1126 1149
1127 1150 def appendfile(path, text):
1128 1151 fp = open(path, 'ab')
1129 1152 try:
1130 1153 fp.write(text)
1131 1154 finally:
1132 1155 fp.close()
1133 1156
1134 1157 class chunkbuffer(object):
1135 1158 """Allow arbitrary sized chunks of data to be efficiently read from an
1136 1159 iterator over chunks of arbitrary size."""
1137 1160
1138 1161 def __init__(self, in_iter):
1139 1162 """in_iter is the iterator that's iterating over the input chunks.
1140 1163 targetsize is how big a buffer to try to maintain."""
1141 1164 def splitbig(chunks):
1142 1165 for chunk in chunks:
1143 1166 if len(chunk) > 2**20:
1144 1167 pos = 0
1145 1168 while pos < len(chunk):
1146 1169 end = pos + 2 ** 18
1147 1170 yield chunk[pos:end]
1148 1171 pos = end
1149 1172 else:
1150 1173 yield chunk
1151 1174 self.iter = splitbig(in_iter)
1152 1175 self._queue = deque()
1153 1176
1154 1177 def read(self, l=None):
1155 1178 """Read L bytes of data from the iterator of chunks of data.
1156 1179 Returns less than L bytes if the iterator runs dry.
1157 1180
1158 1181 If size parameter is omitted, read everything"""
1159 1182 left = l
1160 1183 buf = []
1161 1184 queue = self._queue
1162 1185 while left is None or left > 0:
1163 1186 # refill the queue
1164 1187 if not queue:
1165 1188 target = 2**18
1166 1189 for chunk in self.iter:
1167 1190 queue.append(chunk)
1168 1191 target -= len(chunk)
1169 1192 if target <= 0:
1170 1193 break
1171 1194 if not queue:
1172 1195 break
1173 1196
1174 1197 chunk = queue.popleft()
1175 1198 if left is not None:
1176 1199 left -= len(chunk)
1177 1200 if left is not None and left < 0:
1178 1201 queue.appendleft(chunk[left:])
1179 1202 buf.append(chunk[:left])
1180 1203 else:
1181 1204 buf.append(chunk)
1182 1205
1183 1206 return ''.join(buf)
1184 1207
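# A minimal usage sketch (editor's addition, not part of util.py): fixed-size
# reads are reassembled from an iterator of unevenly sized chunks.
def _examplechunkbuffer():
    buf = chunkbuffer(iter(['ab', 'cdef', 'g']))
    return buf.read(3), buf.read()   # ('abc', 'defg')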
1185 1208 def filechunkiter(f, size=65536, limit=None):
1186 1209 """Create a generator that produces the data in the file size
1187 1210 (default 65536) bytes at a time, up to optional limit (default is
1188 1211 to read all data). Chunks may be less than size bytes if the
1189 1212 chunk is the last chunk in the file, or the file is a socket or
1190 1213 some other type of file that sometimes reads less data than is
1191 1214 requested."""
1192 1215 assert size >= 0
1193 1216 assert limit is None or limit >= 0
1194 1217 while True:
1195 1218 if limit is None:
1196 1219 nbytes = size
1197 1220 else:
1198 1221 nbytes = min(limit, size)
1199 1222 s = nbytes and f.read(nbytes)
1200 1223 if not s:
1201 1224 break
1202 1225 if limit:
1203 1226 limit -= len(s)
1204 1227 yield s
1205 1228
1206 1229 def makedate(timestamp=None):
1207 1230 '''Return a unix timestamp (or the current time) as a (unixtime,
1208 1231 offset) tuple based on the local timezone.'''
1209 1232 if timestamp is None:
1210 1233 timestamp = time.time()
1211 1234 if timestamp < 0:
1212 1235 hint = _("check your clock")
1213 1236 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1214 1237 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1215 1238 datetime.datetime.fromtimestamp(timestamp))
1216 1239 tz = delta.days * 86400 + delta.seconds
1217 1240 return timestamp, tz
1218 1241
1219 1242 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1220 1243 """represent a (unixtime, offset) tuple as a localized time.
1221 1244 unixtime is seconds since the epoch, and offset is the time zone's
1222 1245 number of seconds away from UTC. in the format string, "%1" expands to
1223 1246 the sign and hours of the offset and "%2" to its minutes."""
1224 1247 t, tz = date or makedate()
1225 1248 if t < 0:
1226 1249 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1227 1250 tz = 0
1228 1251 if "%1" in format or "%2" in format or "%z" in format:
1229 1252 sign = (tz > 0) and "-" or "+"
1230 1253 minutes = abs(tz) // 60
1231 1254 format = format.replace("%z", "%1%2")
1232 1255 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1233 1256 format = format.replace("%2", "%02d" % (minutes % 60))
1234 1257 try:
1235 1258 t = time.gmtime(float(t) - tz)
1236 1259 except ValueError:
1237 1260 # time was out of range
1238 1261 t = time.gmtime(sys.maxint)
1239 1262 s = time.strftime(format, t)
1240 1263 return s
1241 1264
1242 1265 def shortdate(date=None):
1243 1266 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1244 1267 return datestr(date, format='%Y-%m-%d')
1245 1268
1246 1269 def strdate(string, format, defaults=[]):
1247 1270 """parse a localized time string and return a (unixtime, offset) tuple.
1248 1271 if the string cannot be parsed, ValueError is raised."""
1249 1272 def timezone(string):
1250 1273 tz = string.split()[-1]
1251 1274 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1252 1275 sign = (tz[0] == "+") and 1 or -1
1253 1276 hours = int(tz[1:3])
1254 1277 minutes = int(tz[3:5])
1255 1278 return -sign * (hours * 60 + minutes) * 60
1256 1279 if tz == "GMT" or tz == "UTC":
1257 1280 return 0
1258 1281 return None
1259 1282
1260 1283 # NOTE: unixtime = localunixtime + offset
1261 1284 offset, date = timezone(string), string
1262 1285 if offset is not None:
1263 1286 date = " ".join(string.split()[:-1])
1264 1287
1265 1288 # add missing elements from defaults
1266 1289 usenow = False # default to using biased defaults
1267 1290 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1268 1291 found = [True for p in part if ("%"+p) in format]
1269 1292 if not found:
1270 1293 date += "@" + defaults[part][usenow]
1271 1294 format += "@%" + part[0]
1272 1295 else:
1273 1296 # We've found a specific time element, less specific time
1274 1297 # elements are relative to today
1275 1298 usenow = True
1276 1299
1277 1300 timetuple = time.strptime(date, format)
1278 1301 localunixtime = int(calendar.timegm(timetuple))
1279 1302 if offset is None:
1280 1303 # local timezone
1281 1304 unixtime = int(time.mktime(timetuple))
1282 1305 offset = unixtime - localunixtime
1283 1306 else:
1284 1307 unixtime = localunixtime + offset
1285 1308 return unixtime, offset
1286 1309
1287 1310 def parsedate(date, formats=None, bias={}):
1288 1311 """parse a localized date/time and return a (unixtime, offset) tuple.
1289 1312
1290 1313 The date may be a "unixtime offset" string or in one of the specified
1291 1314 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1292 1315
1293 1316 >>> parsedate(' today ') == parsedate(\
1294 1317 datetime.date.today().strftime('%b %d'))
1295 1318 True
1296 1319 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1297 1320 datetime.timedelta(days=1)\
1298 1321 ).strftime('%b %d'))
1299 1322 True
1300 1323 >>> now, tz = makedate()
1301 1324 >>> strnow, strtz = parsedate('now')
1302 1325 >>> (strnow - now) < 1
1303 1326 True
1304 1327 >>> tz == strtz
1305 1328 True
1306 1329 """
1307 1330 if not date:
1308 1331 return 0, 0
1309 1332 if isinstance(date, tuple) and len(date) == 2:
1310 1333 return date
1311 1334 if not formats:
1312 1335 formats = defaultdateformats
1313 1336 date = date.strip()
1314 1337
1315 1338 if date == _('now'):
1316 1339 return makedate()
1317 1340 if date == _('today'):
1318 1341 date = datetime.date.today().strftime('%b %d')
1319 1342 elif date == _('yesterday'):
1320 1343 date = (datetime.date.today() -
1321 1344 datetime.timedelta(days=1)).strftime('%b %d')
1322 1345
1323 1346 try:
1324 1347 when, offset = map(int, date.split(' '))
1325 1348 except ValueError:
1326 1349 # fill out defaults
1327 1350 now = makedate()
1328 1351 defaults = {}
1329 1352 for part in ("d", "mb", "yY", "HI", "M", "S"):
1330 1353 # this piece is for rounding the specific end of unknowns
1331 1354 b = bias.get(part)
1332 1355 if b is None:
1333 1356 if part[0] in "HMS":
1334 1357 b = "00"
1335 1358 else:
1336 1359 b = "0"
1337 1360
1338 1361 # this piece is for matching the generic end to today's date
1339 1362 n = datestr(now, "%" + part[0])
1340 1363
1341 1364 defaults[part] = (b, n)
1342 1365
1343 1366 for format in formats:
1344 1367 try:
1345 1368 when, offset = strdate(date, format, defaults)
1346 1369 except (ValueError, OverflowError):
1347 1370 pass
1348 1371 else:
1349 1372 break
1350 1373 else:
1351 1374 raise Abort(_('invalid date: %r') % date)
1352 1375 # validate explicit (probably user-specified) date and
1353 1376 # time zone offset. values must fit in signed 32 bits for
1354 1377 # current 32-bit linux runtimes. timezones go from UTC-12
1355 1378 # to UTC+14
1356 1379 if abs(when) > 0x7fffffff:
1357 1380 raise Abort(_('date exceeds 32 bits: %d') % when)
1358 1381 if when < 0:
1359 1382 raise Abort(_('negative date value: %d') % when)
1360 1383 if offset < -50400 or offset > 43200:
1361 1384 raise Abort(_('impossible time zone offset: %d') % offset)
1362 1385 return when, offset
1363 1386
1364 1387 def matchdate(date):
1365 1388 """Return a function that matches a given date match specifier
1366 1389
1367 1390 Formats include:
1368 1391
1369 1392 '{date}' match a given date to the accuracy provided
1370 1393
1371 1394 '<{date}' on or before a given date
1372 1395
1373 1396 '>{date}' on or after a given date
1374 1397
1375 1398 >>> p1 = parsedate("10:29:59")
1376 1399 >>> p2 = parsedate("10:30:00")
1377 1400 >>> p3 = parsedate("10:30:59")
1378 1401 >>> p4 = parsedate("10:31:00")
1379 1402 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1380 1403 >>> f = matchdate("10:30")
1381 1404 >>> f(p1[0])
1382 1405 False
1383 1406 >>> f(p2[0])
1384 1407 True
1385 1408 >>> f(p3[0])
1386 1409 True
1387 1410 >>> f(p4[0])
1388 1411 False
1389 1412 >>> f(p5[0])
1390 1413 False
1391 1414 """
1392 1415
1393 1416 def lower(date):
1394 1417 d = {'mb': "1", 'd': "1"}
1395 1418 return parsedate(date, extendeddateformats, d)[0]
1396 1419
1397 1420 def upper(date):
1398 1421 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1399 1422 for days in ("31", "30", "29"):
1400 1423 try:
1401 1424 d["d"] = days
1402 1425 return parsedate(date, extendeddateformats, d)[0]
1403 1426 except Abort:
1404 1427 pass
1405 1428 d["d"] = "28"
1406 1429 return parsedate(date, extendeddateformats, d)[0]
1407 1430
1408 1431 date = date.strip()
1409 1432
1410 1433 if not date:
1411 1434 raise Abort(_("dates cannot consist entirely of whitespace"))
1412 1435 elif date[0] == "<":
1413 1436 if not date[1:]:
1414 1437 raise Abort(_("invalid day spec, use '<DATE'"))
1415 1438 when = upper(date[1:])
1416 1439 return lambda x: x <= when
1417 1440 elif date[0] == ">":
1418 1441 if not date[1:]:
1419 1442 raise Abort(_("invalid day spec, use '>DATE'"))
1420 1443 when = lower(date[1:])
1421 1444 return lambda x: x >= when
1422 1445 elif date[0] == "-":
1423 1446 try:
1424 1447 days = int(date[1:])
1425 1448 except ValueError:
1426 1449 raise Abort(_("invalid day spec: %s") % date[1:])
1427 1450 if days < 0:
1428 1451 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1429 1452 % date[1:])
1430 1453 when = makedate()[0] - days * 3600 * 24
1431 1454 return lambda x: x >= when
1432 1455 elif " to " in date:
1433 1456 a, b = date.split(" to ")
1434 1457 start, stop = lower(a), upper(b)
1435 1458 return lambda x: x >= start and x <= stop
1436 1459 else:
1437 1460 start, stop = lower(date), upper(date)
1438 1461 return lambda x: x >= start and x <= stop
1439 1462
1440 1463 def shortuser(user):
1441 1464 """Return a short representation of a user name or email address."""
1442 1465 f = user.find('@')
1443 1466 if f >= 0:
1444 1467 user = user[:f]
1445 1468 f = user.find('<')
1446 1469 if f >= 0:
1447 1470 user = user[f + 1:]
1448 1471 f = user.find(' ')
1449 1472 if f >= 0:
1450 1473 user = user[:f]
1451 1474 f = user.find('.')
1452 1475 if f >= 0:
1453 1476 user = user[:f]
1454 1477 return user
1455 1478
1456 1479 def emailuser(user):
1457 1480 """Return the user portion of an email address."""
1458 1481 f = user.find('@')
1459 1482 if f >= 0:
1460 1483 user = user[:f]
1461 1484 f = user.find('<')
1462 1485 if f >= 0:
1463 1486 user = user[f + 1:]
1464 1487 return user
1465 1488
1466 1489 def email(author):
1467 1490 '''get email of author.'''
1468 1491 r = author.find('>')
1469 1492 if r == -1:
1470 1493 r = None
1471 1494 return author[author.find('<') + 1:r]
1472 1495
1473 1496 def ellipsis(text, maxlength=400):
1474 1497 """Trim string to at most maxlength (default: 400) columns in display."""
1475 1498 return encoding.trim(text, maxlength, ellipsis='...')
1476 1499
1477 1500 def unitcountfn(*unittable):
1478 1501 '''return a function that renders a readable count of some quantity'''
1479 1502
1480 1503 def go(count):
1481 1504 for multiplier, divisor, format in unittable:
1482 1505 if count >= divisor * multiplier:
1483 1506 return format % (count / float(divisor))
1484 1507 return unittable[-1][2] % count
1485 1508
1486 1509 return go
1487 1510
1488 1511 bytecount = unitcountfn(
1489 1512 (100, 1 << 30, _('%.0f GB')),
1490 1513 (10, 1 << 30, _('%.1f GB')),
1491 1514 (1, 1 << 30, _('%.2f GB')),
1492 1515 (100, 1 << 20, _('%.0f MB')),
1493 1516 (10, 1 << 20, _('%.1f MB')),
1494 1517 (1, 1 << 20, _('%.2f MB')),
1495 1518 (100, 1 << 10, _('%.0f KB')),
1496 1519 (10, 1 << 10, _('%.1f KB')),
1497 1520 (1, 1 << 10, _('%.2f KB')),
1498 1521 (1, 1, _('%.0f bytes')),
1499 1522 )
1500 1523
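# A minimal usage sketch (editor's addition, not part of util.py): the first
# entry of the table whose threshold is reached wins, so precision shrinks
# as the value grows.
def _examplebytecount():
    return bytecount(1234567)        # '1.18 MB' with the table above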
1501 1524 def uirepr(s):
1502 1525 # Avoid double backslash in Windows path repr()
1503 1526 return repr(s).replace('\\\\', '\\')
1504 1527
1505 1528 # delay import of textwrap
1506 1529 def MBTextWrapper(**kwargs):
1507 1530 class tw(textwrap.TextWrapper):
1508 1531 """
1509 1532 Extend TextWrapper for width-awareness.
1510 1533
1511 1534 Neither the number of 'bytes' in any encoding nor the number of
1512 1535 'characters' is appropriate for calculating terminal columns of a string.
1513 1536
1514 1537 The original TextWrapper implementation uses the built-in 'len()'
1515 1538 directly, so overriding is needed to use the width information of each
1516 1539 character.
1517 1540
1518 1541 In addition, characters classified as 'ambiguous' width are treated as
1519 1542 wide in East Asian locales, but as narrow elsewhere. This requires a
1520 1543 user decision to determine the width of such characters.
1521 1544 """
1522 1545 def __init__(self, **kwargs):
1523 1546 textwrap.TextWrapper.__init__(self, **kwargs)
1524 1547
1525 1548 # for compatibility between 2.4 and 2.6
1526 1549 if getattr(self, 'drop_whitespace', None) is None:
1527 1550 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1528 1551
1529 1552 def _cutdown(self, ucstr, space_left):
1530 1553 l = 0
1531 1554 colwidth = encoding.ucolwidth
1532 1555 for i in xrange(len(ucstr)):
1533 1556 l += colwidth(ucstr[i])
1534 1557 if space_left < l:
1535 1558 return (ucstr[:i], ucstr[i:])
1536 1559 return ucstr, ''
1537 1560
1538 1561 # overriding of base class
1539 1562 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1540 1563 space_left = max(width - cur_len, 1)
1541 1564
1542 1565 if self.break_long_words:
1543 1566 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1544 1567 cur_line.append(cut)
1545 1568 reversed_chunks[-1] = res
1546 1569 elif not cur_line:
1547 1570 cur_line.append(reversed_chunks.pop())
1548 1571
1549 1572 # this overriding code is imported from TextWrapper of python 2.6
1550 1573 # to calculate columns of string by 'encoding.ucolwidth()'
1551 1574 def _wrap_chunks(self, chunks):
1552 1575 colwidth = encoding.ucolwidth
1553 1576
1554 1577 lines = []
1555 1578 if self.width <= 0:
1556 1579 raise ValueError("invalid width %r (must be > 0)" % self.width)
1557 1580
1558 1581 # Arrange in reverse order so items can be efficiently popped
1559 1582 # from a stack of chunks.
1560 1583 chunks.reverse()
1561 1584
1562 1585 while chunks:
1563 1586
1564 1587 # Start the list of chunks that will make up the current line.
1565 1588 # cur_len is just the length of all the chunks in cur_line.
1566 1589 cur_line = []
1567 1590 cur_len = 0
1568 1591
1569 1592 # Figure out which static string will prefix this line.
1570 1593 if lines:
1571 1594 indent = self.subsequent_indent
1572 1595 else:
1573 1596 indent = self.initial_indent
1574 1597
1575 1598 # Maximum width for this line.
1576 1599 width = self.width - len(indent)
1577 1600
1578 1601 # First chunk on line is whitespace -- drop it, unless this
1579 1602 # is the very beginning of the text (i.e. no lines started yet).
1580 1603 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1581 1604 del chunks[-1]
1582 1605
1583 1606 while chunks:
1584 1607 l = colwidth(chunks[-1])
1585 1608
1586 1609 # Can at least squeeze this chunk onto the current line.
1587 1610 if cur_len + l <= width:
1588 1611 cur_line.append(chunks.pop())
1589 1612 cur_len += l
1590 1613
1591 1614 # Nope, this line is full.
1592 1615 else:
1593 1616 break
1594 1617
1595 1618 # The current line is full, and the next chunk is too big to
1596 1619 # fit on *any* line (not just this one).
1597 1620 if chunks and colwidth(chunks[-1]) > width:
1598 1621 self._handle_long_word(chunks, cur_line, cur_len, width)
1599 1622
1600 1623 # If the last chunk on this line is all whitespace, drop it.
1601 1624 if (self.drop_whitespace and
1602 1625 cur_line and cur_line[-1].strip() == ''):
1603 1626 del cur_line[-1]
1604 1627
1605 1628 # Convert current line back to a string and store it in list
1606 1629 # of all lines (return value).
1607 1630 if cur_line:
1608 1631 lines.append(indent + ''.join(cur_line))
1609 1632
1610 1633 return lines
1611 1634
1612 1635 global MBTextWrapper
1613 1636 MBTextWrapper = tw
1614 1637 return tw(**kwargs)
1615 1638
1616 1639 def wrap(line, width, initindent='', hangindent=''):
1617 1640 maxindent = max(len(hangindent), len(initindent))
1618 1641 if width <= maxindent:
1619 1642 # adjust for weird terminal size
1620 1643 width = max(78, maxindent + 1)
1621 1644 line = line.decode(encoding.encoding, encoding.encodingmode)
1622 1645 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1623 1646 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1624 1647 wrapper = MBTextWrapper(width=width,
1625 1648 initial_indent=initindent,
1626 1649 subsequent_indent=hangindent)
1627 1650 return wrapper.fill(line).encode(encoding.encoding)
1628 1651
1629 1652 def iterlines(iterator):
1630 1653 for chunk in iterator:
1631 1654 for line in chunk.splitlines():
1632 1655 yield line
1633 1656
1634 1657 def expandpath(path):
1635 1658 return os.path.expanduser(os.path.expandvars(path))
1636 1659
1637 1660 def hgcmd():
1638 1661 """Return the command used to execute current hg
1639 1662
1640 1663 This is different from hgexecutable() because on Windows we want
1641 1664 to avoid things opening new shell windows like batch files, so we
1642 1665 get either the python call or current executable.
1643 1666 """
1644 1667 if mainfrozen():
1645 1668 return [sys.executable]
1646 1669 return gethgcmd()
1647 1670
1648 1671 def rundetached(args, condfn):
1649 1672 """Execute the argument list in a detached process.
1650 1673
1651 1674 condfn is a callable which is called repeatedly and should return
1652 1675 True once the child process is known to have started successfully.
1653 1676 At this point, the child process PID is returned. If the child
1654 1677 process fails to start or finishes before condfn() evaluates to
1655 1678 True, return -1.
1656 1679 """
1657 1680 # Windows case is easier because the child process is either
1658 1681 # successfully starting and validating the condition or exiting
1659 1682 # on failure. We just poll on its PID. On Unix, if the child
1660 1683 # process fails to start, it will be left in a zombie state until
1661 1684 # the parent wait on it, which we cannot do since we expect a long
1662 1685 # running process on success. Instead we listen for SIGCHLD telling
1663 1686 # us our child process terminated.
1664 1687 terminated = set()
1665 1688 def handler(signum, frame):
1666 1689 terminated.add(os.wait())
1667 1690 prevhandler = None
1668 1691 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1669 1692 if SIGCHLD is not None:
1670 1693 prevhandler = signal.signal(SIGCHLD, handler)
1671 1694 try:
1672 1695 pid = spawndetached(args)
1673 1696 while not condfn():
1674 1697 if ((pid in terminated or not testpid(pid))
1675 1698 and not condfn()):
1676 1699 return -1
1677 1700 time.sleep(0.1)
1678 1701 return pid
1679 1702 finally:
1680 1703 if prevhandler is not None:
1681 1704 signal.signal(signal.SIGCHLD, prevhandler)
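
# Hypothetical usage sketch (not part of the original module); the command
# line and readiness check below are made up:
#
#   def started():
#       return os.path.exists('/tmp/daemon.pid')  # invented readiness probe
#   pid = rundetached(['hg', 'serve', '-d'], started)
#   if pid < 0:
#       raise Abort(_('child process failed to start'))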
1682 1705
1683 1706 try:
1684 1707 any, all = any, all
1685 1708 except NameError:
1686 1709 def any(iterable):
1687 1710 for i in iterable:
1688 1711 if i:
1689 1712 return True
1690 1713 return False
1691 1714
1692 1715 def all(iterable):
1693 1716 for i in iterable:
1694 1717 if not i:
1695 1718 return False
1696 1719 return True
1697 1720
1698 1721 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1699 1722 """Return the result of interpolating items in the mapping into string s.
1700 1723
1701 1724 prefix is a single character string, or a two character string with
1702 1725 a backslash as the first character if the prefix needs to be escaped in
1703 1726 a regular expression.
1704 1727
1705 1728 fn is an optional function that will be applied to the replacement text
1706 1729 just before replacement.
1707 1730
1708 1731 escape_prefix is an optional flag that allows the prefix itself to be
1709 1732 escaped by doubling it.
1710 1733 """
1711 1734 fn = fn or (lambda s: s)
1712 1735 patterns = '|'.join(mapping.keys())
1713 1736 if escape_prefix:
1714 1737 patterns += '|' + prefix
1715 1738 if len(prefix) > 1:
1716 1739 prefix_char = prefix[1:]
1717 1740 else:
1718 1741 prefix_char = prefix
1719 1742 mapping[prefix_char] = prefix_char
1720 1743 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1721 1744 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
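
# Illustrative example (not part of the original module); the prefix and
# mapping below are made up:
#
#   interpolate('%', {'foo': 'FOO', 'bar': 'BAR'}, 'x %foo y %bar z')
#   # -> 'x FOO y BAR z'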
1722 1745
1723 1746 def getport(port):
1724 1747 """Return the port for a given network service.
1725 1748
1726 1749 If port is an integer, it's returned as is. If it's a string, it's
1727 1750 looked up using socket.getservbyname(). If there's no matching
1728 1751 service, util.Abort is raised.
1729 1752 """
1730 1753 try:
1731 1754 return int(port)
1732 1755 except ValueError:
1733 1756 pass
1734 1757
1735 1758 try:
1736 1759 return socket.getservbyname(port)
1737 1760 except socket.error:
1738 1761 raise Abort(_("no port number associated with service '%s'") % port)
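
# Illustrative behaviour (not part of the original module):
#
#   getport(8080)      # -> 8080, integers pass straight through
#   getport('8080')    # -> 8080, numeric strings are converted
#   getport('http')    # -> 80 where the services database lists 'http'
#   getport('bogus')   # raises Abort if no such service exists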
1739 1762
1740 1763 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1741 1764 '0': False, 'no': False, 'false': False, 'off': False,
1742 1765 'never': False}
1743 1766
1744 1767 def parsebool(s):
1745 1768 """Parse s into a boolean.
1746 1769
1747 1770 If s is not a valid boolean, returns None.
1748 1771 """
1749 1772 return _booleans.get(s.lower(), None)
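
# Illustrative behaviour (not part of the original module):
#
#   parsebool('yes')    # -> True
#   parsebool('Off')    # -> False, lookup is case-insensitive
#   parsebool('maybe')  # -> None, unrecognised values map to None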
1750 1773
1751 1774 _hexdig = '0123456789ABCDEFabcdef'
1752 1775 _hextochr = dict((a + b, chr(int(a + b, 16)))
1753 1776 for a in _hexdig for b in _hexdig)
1754 1777
1755 1778 def _urlunquote(s):
1756 1779 """Decode HTTP/HTML % encoding.
1757 1780
1758 1781 >>> _urlunquote('abc%20def')
1759 1782 'abc def'
1760 1783 """
1761 1784 res = s.split('%')
1762 1785 # fastpath
1763 1786 if len(res) == 1:
1764 1787 return s
1765 1788 s = res[0]
1766 1789 for item in res[1:]:
1767 1790 try:
1768 1791 s += _hextochr[item[:2]] + item[2:]
1769 1792 except KeyError:
1770 1793 s += '%' + item
1771 1794 except UnicodeDecodeError:
1772 1795 s += unichr(int(item[:2], 16)) + item[2:]
1773 1796 return s
1774 1797
1775 1798 class url(object):
1776 1799 r"""Reliable URL parser.
1777 1800
1778 1801 This parses URLs and provides attributes for the following
1779 1802 components:
1780 1803
1781 1804 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1782 1805
1783 1806 Missing components are set to None. The only exception is
1784 1807 fragment, which is set to '' if present but empty.
1785 1808
1786 1809 If parsefragment is False, fragment is included in query. If
1787 1810 parsequery is False, query is included in path. If both are
1788 1811 False, both fragment and query are included in path.
1789 1812
1790 1813 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1791 1814
1792 1815 Note that for backward compatibility reasons, bundle URLs do not
1793 1816 take host names. That means 'bundle://../' has a path of '../'.
1794 1817
1795 1818 Examples:
1796 1819
1797 1820 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1798 1821 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1799 1822 >>> url('ssh://[::1]:2200//home/joe/repo')
1800 1823 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1801 1824 >>> url('file:///home/joe/repo')
1802 1825 <url scheme: 'file', path: '/home/joe/repo'>
1803 1826 >>> url('file:///c:/temp/foo/')
1804 1827 <url scheme: 'file', path: 'c:/temp/foo/'>
1805 1828 >>> url('bundle:foo')
1806 1829 <url scheme: 'bundle', path: 'foo'>
1807 1830 >>> url('bundle://../foo')
1808 1831 <url scheme: 'bundle', path: '../foo'>
1809 1832 >>> url(r'c:\foo\bar')
1810 1833 <url path: 'c:\\foo\\bar'>
1811 1834 >>> url(r'\\blah\blah\blah')
1812 1835 <url path: '\\\\blah\\blah\\blah'>
1813 1836 >>> url(r'\\blah\blah\blah#baz')
1814 1837 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1815 1838 >>> url(r'file:///C:\users\me')
1816 1839 <url scheme: 'file', path: 'C:\\users\\me'>
1817 1840
1818 1841 Authentication credentials:
1819 1842
1820 1843 >>> url('ssh://joe:xyz@x/repo')
1821 1844 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1822 1845 >>> url('ssh://joe@x/repo')
1823 1846 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1824 1847
1825 1848 Query strings and fragments:
1826 1849
1827 1850 >>> url('http://host/a?b#c')
1828 1851 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1829 1852 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1830 1853 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1831 1854 """
1832 1855
1833 1856 _safechars = "!~*'()+"
1834 1857 _safepchars = "/!~*'()+:\\"
1835 1858 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
1836 1859
1837 1860 def __init__(self, path, parsequery=True, parsefragment=True):
1838 1861 # We slowly chomp away at path until we have only the path left
1839 1862 self.scheme = self.user = self.passwd = self.host = None
1840 1863 self.port = self.path = self.query = self.fragment = None
1841 1864 self._localpath = True
1842 1865 self._hostport = ''
1843 1866 self._origpath = path
1844 1867
1845 1868 if parsefragment and '#' in path:
1846 1869 path, self.fragment = path.split('#', 1)
1847 1870 if not path:
1848 1871 path = None
1849 1872
1850 1873 # special case for Windows drive letters and UNC paths
1851 1874 if hasdriveletter(path) or path.startswith(r'\\'):
1852 1875 self.path = path
1853 1876 return
1854 1877
1855 1878 # For compatibility reasons, we can't handle bundle paths as
1856 1879 # normal URLs
1857 1880 if path.startswith('bundle:'):
1858 1881 self.scheme = 'bundle'
1859 1882 path = path[7:]
1860 1883 if path.startswith('//'):
1861 1884 path = path[2:]
1862 1885 self.path = path
1863 1886 return
1864 1887
1865 1888 if self._matchscheme(path):
1866 1889 parts = path.split(':', 1)
1867 1890 if parts[0]:
1868 1891 self.scheme, path = parts
1869 1892 self._localpath = False
1870 1893
1871 1894 if not path:
1872 1895 path = None
1873 1896 if self._localpath:
1874 1897 self.path = ''
1875 1898 return
1876 1899 else:
1877 1900 if self._localpath:
1878 1901 self.path = path
1879 1902 return
1880 1903
1881 1904 if parsequery and '?' in path:
1882 1905 path, self.query = path.split('?', 1)
1883 1906 if not path:
1884 1907 path = None
1885 1908 if not self.query:
1886 1909 self.query = None
1887 1910
1888 1911 # // is required to specify a host/authority
1889 1912 if path and path.startswith('//'):
1890 1913 parts = path[2:].split('/', 1)
1891 1914 if len(parts) > 1:
1892 1915 self.host, path = parts
1893 1916 else:
1894 1917 self.host = parts[0]
1895 1918 path = None
1896 1919 if not self.host:
1897 1920 self.host = None
1898 1921 # path of file:///d is /d
1899 1922 # path of file:///d:/ is d:/, not /d:/
1900 1923 if path and not hasdriveletter(path):
1901 1924 path = '/' + path
1902 1925
1903 1926 if self.host and '@' in self.host:
1904 1927 self.user, self.host = self.host.rsplit('@', 1)
1905 1928 if ':' in self.user:
1906 1929 self.user, self.passwd = self.user.split(':', 1)
1907 1930 if not self.host:
1908 1931 self.host = None
1909 1932
1910 1933 # Don't split on colons in IPv6 addresses without ports
1911 1934 if (self.host and ':' in self.host and
1912 1935 not (self.host.startswith('[') and self.host.endswith(']'))):
1913 1936 self._hostport = self.host
1914 1937 self.host, self.port = self.host.rsplit(':', 1)
1915 1938 if not self.host:
1916 1939 self.host = None
1917 1940
1918 1941 if (self.host and self.scheme == 'file' and
1919 1942 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1920 1943 raise Abort(_('file:// URLs can only refer to localhost'))
1921 1944
1922 1945 self.path = path
1923 1946
1924 1947 # leave the query string escaped
1925 1948 for a in ('user', 'passwd', 'host', 'port',
1926 1949 'path', 'fragment'):
1927 1950 v = getattr(self, a)
1928 1951 if v is not None:
1929 1952 setattr(self, a, _urlunquote(v))
1930 1953
1931 1954 def __repr__(self):
1932 1955 attrs = []
1933 1956 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1934 1957 'query', 'fragment'):
1935 1958 v = getattr(self, a)
1936 1959 if v is not None:
1937 1960 attrs.append('%s: %r' % (a, v))
1938 1961 return '<url %s>' % ', '.join(attrs)
1939 1962
1940 1963 def __str__(self):
1941 1964 r"""Join the URL's components back into a URL string.
1942 1965
1943 1966 Examples:
1944 1967
1945 1968 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1946 1969 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1947 1970 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1948 1971 'http://user:pw@host:80/?foo=bar&baz=42'
1949 1972 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1950 1973 'http://user:pw@host:80/?foo=bar%3dbaz'
1951 1974 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1952 1975 'ssh://user:pw@[::1]:2200//home/joe#'
1953 1976 >>> str(url('http://localhost:80//'))
1954 1977 'http://localhost:80//'
1955 1978 >>> str(url('http://localhost:80/'))
1956 1979 'http://localhost:80/'
1957 1980 >>> str(url('http://localhost:80'))
1958 1981 'http://localhost:80/'
1959 1982 >>> str(url('bundle:foo'))
1960 1983 'bundle:foo'
1961 1984 >>> str(url('bundle://../foo'))
1962 1985 'bundle:../foo'
1963 1986 >>> str(url('path'))
1964 1987 'path'
1965 1988 >>> str(url('file:///tmp/foo/bar'))
1966 1989 'file:///tmp/foo/bar'
1967 1990 >>> str(url('file:///c:/tmp/foo/bar'))
1968 1991 'file:///c:/tmp/foo/bar'
1969 1992 >>> print url(r'bundle:foo\bar')
1970 1993 bundle:foo\bar
1971 1994 >>> print url(r'file:///D:\data\hg')
1972 1995 file:///D:\data\hg
1973 1996 """
1974 1997 if self._localpath:
1975 1998 s = self.path
1976 1999 if self.scheme == 'bundle':
1977 2000 s = 'bundle:' + s
1978 2001 if self.fragment:
1979 2002 s += '#' + self.fragment
1980 2003 return s
1981 2004
1982 2005 s = self.scheme + ':'
1983 2006 if self.user or self.passwd or self.host:
1984 2007 s += '//'
1985 2008 elif self.scheme and (not self.path or self.path.startswith('/')
1986 2009 or hasdriveletter(self.path)):
1987 2010 s += '//'
1988 2011 if hasdriveletter(self.path):
1989 2012 s += '/'
1990 2013 if self.user:
1991 2014 s += urllib.quote(self.user, safe=self._safechars)
1992 2015 if self.passwd:
1993 2016 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
1994 2017 if self.user or self.passwd:
1995 2018 s += '@'
1996 2019 if self.host:
1997 2020 if not (self.host.startswith('[') and self.host.endswith(']')):
1998 2021 s += urllib.quote(self.host)
1999 2022 else:
2000 2023 s += self.host
2001 2024 if self.port:
2002 2025 s += ':' + urllib.quote(self.port)
2003 2026 if self.host:
2004 2027 s += '/'
2005 2028 if self.path:
2006 2029 # TODO: similar to the query string, we should not unescape the
2007 2030 # path when we store it; the path might contain '%2f' = '/',
2008 2031 # which we should *not* escape.
2009 2032 s += urllib.quote(self.path, safe=self._safepchars)
2010 2033 if self.query:
2011 2034 # we store the query in escaped form.
2012 2035 s += '?' + self.query
2013 2036 if self.fragment is not None:
2014 2037 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2015 2038 return s
2016 2039
2017 2040 def authinfo(self):
2018 2041 user, passwd = self.user, self.passwd
2019 2042 try:
2020 2043 self.user, self.passwd = None, None
2021 2044 s = str(self)
2022 2045 finally:
2023 2046 self.user, self.passwd = user, passwd
2024 2047 if not self.user:
2025 2048 return (s, None)
2026 2049 # authinfo[1] is passed to urllib2 password manager, and its
2027 2050 # URIs must not contain credentials. The host is passed in the
2028 2051 # URIs list because Python < 2.4.3 uses only that to search for
2029 2052 # a password.
2030 2053 return (s, (None, (s, self.host),
2031 2054 self.user, self.passwd or ''))
2032 2055
2033 2056 def isabs(self):
2034 2057 if self.scheme and self.scheme != 'file':
2035 2058 return True # remote URL
2036 2059 if hasdriveletter(self.path):
2037 2060 return True # absolute for our purposes - can't be joined()
2038 2061 if self.path.startswith(r'\\'):
2039 2062 return True # Windows UNC path
2040 2063 if self.path.startswith('/'):
2041 2064 return True # POSIX-style
2042 2065 return False
2043 2066
2044 2067 def localpath(self):
2045 2068 if self.scheme == 'file' or self.scheme == 'bundle':
2046 2069 path = self.path or '/'
2047 2070 # For Windows, we need to promote hosts containing drive
2048 2071 # letters to paths with drive letters.
2049 2072 if hasdriveletter(self._hostport):
2050 2073 path = self._hostport + '/' + self.path
2051 2074 elif (self.host is not None and self.path
2052 2075 and not hasdriveletter(path)):
2053 2076 path = '/' + path
2054 2077 return path
2055 2078 return self._origpath
2056 2079
2057 2080 def islocal(self):
2058 2081 '''whether localpath will return something that posixfile can open'''
2059 2082 return (not self.scheme or self.scheme == 'file'
2060 2083 or self.scheme == 'bundle')
2061 2084
2062 2085 def hasscheme(path):
2063 2086 return bool(url(path).scheme)
2064 2087
2065 2088 def hasdriveletter(path):
2066 2089 return path and path[1:2] == ':' and path[0:1].isalpha()
2067 2090
2068 2091 def urllocalpath(path):
2069 2092 return url(path, parsequery=False, parsefragment=False).localpath()
2070 2093
2071 2094 def hidepassword(u):
2072 2095 '''hide user credential in a url string'''
2073 2096 u = url(u)
2074 2097 if u.passwd:
2075 2098 u.passwd = '***'
2076 2099 return str(u)
2077 2100
2078 2101 def removeauth(u):
2079 2102 '''remove all authentication information from a url string'''
2080 2103 u = url(u)
2081 2104 u.user = u.passwd = None
2082 2105 return str(u)
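
# Illustrative behaviour (not part of the original module); the URL below
# is made up:
#
#   hidepassword('http://alice:secret@example.com/repo')
#   # -> 'http://alice:***@example.com/repo'
#   removeauth('http://alice:secret@example.com/repo')
#   # -> 'http://example.com/repo'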
2083 2106
2084 2107 def isatty(fd):
2085 2108 try:
2086 2109 return fd.isatty()
2087 2110 except AttributeError:
2088 2111 return False
2089 2112
2090 2113 timecount = unitcountfn(
2091 2114 (1, 1e3, _('%.0f s')),
2092 2115 (100, 1, _('%.1f s')),
2093 2116 (10, 1, _('%.2f s')),
2094 2117 (1, 1, _('%.3f s')),
2095 2118 (100, 0.001, _('%.1f ms')),
2096 2119 (10, 0.001, _('%.2f ms')),
2097 2120 (1, 0.001, _('%.3f ms')),
2098 2121 (100, 0.000001, _('%.1f us')),
2099 2122 (10, 0.000001, _('%.2f us')),
2100 2123 (1, 0.000001, _('%.3f us')),
2101 2124 (100, 0.000000001, _('%.1f ns')),
2102 2125 (10, 0.000000001, _('%.2f ns')),
2103 2126 (1, 0.000000001, _('%.3f ns')),
2104 2127 )
2105 2128
2106 2129 _timenesting = [0]
2107 2130
2108 2131 def timed(func):
2109 2132 '''Report the execution time of a function call to stderr.
2110 2133
2111 2134 During development, use as a decorator when you need to measure
2112 2135 the cost of a function, e.g. as follows:
2113 2136
2114 2137 @util.timed
2115 2138 def foo(a, b, c):
2116 2139 pass
2117 2140 '''
2118 2141
2119 2142 def wrapper(*args, **kwargs):
2120 2143 start = time.time()
2121 2144 indent = 2
2122 2145 _timenesting[0] += indent
2123 2146 try:
2124 2147 return func(*args, **kwargs)
2125 2148 finally:
2126 2149 elapsed = time.time() - start
2127 2150 _timenesting[0] -= indent
2128 2151 sys.stderr.write('%s%s: %s\n' %
2129 2152 (' ' * _timenesting[0], func.__name__,
2130 2153 timecount(elapsed)))
2131 2154 return wrapper
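
# Illustrative output sketch (not part of the original module): with the
# decorator applied as in the docstring above, a call to foo() might write
# something like the following to stderr, nested calls being indented two
# extra spaces per level:
#
#   foo: 4.200 ms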
2132 2155
2133 2156 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2134 2157 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2135 2158
2136 2159 def sizetoint(s):
2137 2160 '''Convert a space specifier to a byte count.
2138 2161
2139 2162 >>> sizetoint('30')
2140 2163 30
2141 2164 >>> sizetoint('2.2kb')
2142 2165 2252
2143 2166 >>> sizetoint('6M')
2144 2167 6291456
2145 2168 '''
2146 2169 t = s.strip().lower()
2147 2170 try:
2148 2171 for k, u in _sizeunits:
2149 2172 if t.endswith(k):
2150 2173 return int(float(t[:-len(k)]) * u)
2151 2174 return int(t)
2152 2175 except ValueError:
2153 2176 raise error.ParseError(_("couldn't parse size: %s") % s)
2154 2177
2155 2178 class hooks(object):
2156 2179 '''A collection of hook functions that can be used to extend a
2157 2180 function's behaviour. Hooks are called in lexicographic order,
2158 2181 based on the names of their sources.'''
2159 2182
2160 2183 def __init__(self):
2161 2184 self._hooks = []
2162 2185
2163 2186 def add(self, source, hook):
2164 2187 self._hooks.append((source, hook))
2165 2188
2166 2189 def __call__(self, *args):
2167 2190 self._hooks.sort(key=lambda x: x[0])
2168 2191 results = []
2169 2192 for source, hook in self._hooks:
2170 2193 results.append(hook(*args))
2171 2194 return results
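
# Hypothetical usage sketch (not part of the original module); the source
# names and callables below are made up:
#
#   myhooks = hooks()
#   myhooks.add('ext-b', lambda repo: 'b')
#   myhooks.add('ext-a', lambda repo: 'a')
#   myhooks(repo)  # calls 'ext-a' before 'ext-b' (lexicographic source
#                  # order) and returns ['a', 'b']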
2172 2195
2173 2196 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2174 2197 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2175 2198 Skips the last 'skip' entries. By default it will flush stdout first.
2176 2199 It can be used everywhere and intentionally does not require a ui object.
2177 2200 It is not meant for production code but is very convenient while developing.
2178 2201 '''
2179 2202 if otherf:
2180 2203 otherf.flush()
2181 2204 f.write('%s at:\n' % msg)
2182 2205 entries = [('%s:%s' % (fn, ln), func)
2183 2206 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2184 2207 if entries:
2185 2208 fnmax = max(len(entry[0]) for entry in entries)
2186 2209 for fnln, func in entries:
2187 2210 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2188 2211 f.flush()
2189 2212
2190 2213 # convenient shortcut
2191 2214 dst = debugstacktrace
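
# Illustrative usage (not part of the original module): drop a call into the
# code path being investigated and read the trace on stderr:
#
#   util.debugstacktrace('who calls me?', skip=1)
#   util.dst('who calls me?')  # same thing via the shortcut above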