##// END OF EJS Templates
util: drop any() and all() polyfills
Augie Fackler -
r25152:ac2e66f4 default
parent child Browse files
Show More
@@ -1,2276 +1,2261 b''
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 import i18n
17 17 _ = i18n._
18 18 import error, osutil, encoding, parsers
19 19 import errno, shutil, sys, tempfile, traceback
20 20 import re as remod
21 21 import os, time, datetime, calendar, textwrap, signal, collections
22 22 import imp, socket, urllib, struct
23 23 import gc
24 24
25 25 if os.name == 'nt':
26 26 import windows as platform
27 27 else:
28 28 import posix as platform
29 29
30 30 cachestat = platform.cachestat
31 31 checkexec = platform.checkexec
32 32 checklink = platform.checklink
33 33 copymode = platform.copymode
34 34 executablepath = platform.executablepath
35 35 expandglobs = platform.expandglobs
36 36 explainexit = platform.explainexit
37 37 findexe = platform.findexe
38 38 gethgcmd = platform.gethgcmd
39 39 getuser = platform.getuser
40 40 groupmembers = platform.groupmembers
41 41 groupname = platform.groupname
42 42 hidewindow = platform.hidewindow
43 43 isexec = platform.isexec
44 44 isowner = platform.isowner
45 45 localpath = platform.localpath
46 46 lookupreg = platform.lookupreg
47 47 makedir = platform.makedir
48 48 nlinks = platform.nlinks
49 49 normpath = platform.normpath
50 50 normcase = platform.normcase
51 51 normcasespec = platform.normcasespec
52 52 normcasefallback = platform.normcasefallback
53 53 openhardlinks = platform.openhardlinks
54 54 oslink = platform.oslink
55 55 parsepatchoutput = platform.parsepatchoutput
56 56 pconvert = platform.pconvert
57 57 popen = platform.popen
58 58 posixfile = platform.posixfile
59 59 quotecommand = platform.quotecommand
60 60 readpipe = platform.readpipe
61 61 rename = platform.rename
62 62 removedirs = platform.removedirs
63 63 samedevice = platform.samedevice
64 64 samefile = platform.samefile
65 65 samestat = platform.samestat
66 66 setbinary = platform.setbinary
67 67 setflags = platform.setflags
68 68 setsignalhandler = platform.setsignalhandler
69 69 shellquote = platform.shellquote
70 70 spawndetached = platform.spawndetached
71 71 split = platform.split
72 72 sshargs = platform.sshargs
73 73 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
74 74 statisexec = platform.statisexec
75 75 statislink = platform.statislink
76 76 termwidth = platform.termwidth
77 77 testpid = platform.testpid
78 78 umask = platform.umask
79 79 unlink = platform.unlink
80 80 unlinkpath = platform.unlinkpath
81 81 username = platform.username
82 82
83 83 # Python compatibility
84 84
85 85 _notset = object()
86 86
def safehasattr(thing, attr):
    """Return True if thing has the attribute attr, even if its value is falsy.

    Unlike Python 2's hasattr, this does not swallow exceptions other than
    a plain missing attribute, because it goes through getattr with a
    sentinel default.
    """
    sentinel = object()
    return getattr(thing, attr, sentinel) is not sentinel
89 89
def sha1(s=''):
    '''
    Low-overhead wrapper around Python's SHA support

    >>> f = _fastsha1
    >>> a = sha1()
    >>> a = f()
    >>> a.hexdigest()
    'da39a3ee5e6b4b0d3255bfef95601890afd80709'
    '''

    # _fastsha1 rebinds both itself and this module-level name on its
    # first call, so this indirection only costs anything once.
    return _fastsha1(s)
102 102
def _fastsha1(s=''):
    # This function will import sha1 from hashlib or sha (whichever is
    # available) and overwrite itself with it on the first call.
    # Subsequent calls will go directly to the imported function.
    if sys.version_info >= (2, 5):
        from hashlib import sha1 as _sha1
    else:
        # Python 2.4 and earlier: the old 'sha' module
        from sha import sha as _sha1
    # rebind both public names so the wrapper overhead disappears after
    # the first use
    global _fastsha1, sha1
    _fastsha1 = sha1 = _sha1
    return _sha1(s)
114 114
def md5(s=''):
    # Lazy import that rebinds the module-level 'md5' name to the real
    # constructor on first call; later calls skip this function entirely.
    try:
        from hashlib import md5 as _md5
    except ImportError:
        # Python < 2.5 fallback
        from md5 import md5 as _md5
    global md5
    md5 = _md5
    return _md5(s)
123 123
124 124 DIGESTS = {
125 125 'md5': md5,
126 126 'sha1': sha1,
127 127 }
128 128 # List of digest types from strongest to weakest
129 129 DIGESTS_BY_STRENGTH = ['sha1', 'md5']
130 130
131 131 try:
132 132 import hashlib
133 133 DIGESTS.update({
134 134 'sha512': hashlib.sha512,
135 135 })
136 136 DIGESTS_BY_STRENGTH.insert(0, 'sha512')
137 137 except ImportError:
138 138 pass
139 139
140 140 for k in DIGESTS_BY_STRENGTH:
141 141 assert k in DIGESTS
142 142
class digester(object):
    """helper to compute digests.

    This helper can be used to compute one or more digests given their name.

    >>> d = digester(['md5', 'sha1'])
    >>> d.update('foo')
    >>> [k for k in sorted(d)]
    ['md5', 'sha1']
    >>> d['md5']
    'acbd18db4cc2f85cedef654fccc4a4d8'
    >>> d['sha1']
    '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
    >>> digester.preferred(['md5', 'sha1'])
    'sha1'
    """

    def __init__(self, digests, s=''):
        self._hashes = {}
        for k in digests:
            if k not in DIGESTS:
                raise Abort(_('unknown digest type: %s') % k)
            self._hashes[k] = DIGESTS[k]()
        if s:
            self.update(s)

    def update(self, data):
        # feed the data to every configured digest
        for h in self._hashes.values():
            h.update(data)

    def __getitem__(self, key):
        if key not in DIGESTS:
            # fix: the error message previously referenced the undefined
            # name 'k' (a leftover from __init__'s loop variable), which
            # raised NameError instead of the intended Abort
            raise Abort(_('unknown digest type: %s') % key)
        return self._hashes[key].hexdigest()

    def __iter__(self):
        return iter(self._hashes)

    @staticmethod
    def preferred(supported):
        """returns the strongest digest type in both supported and DIGESTS."""

        for k in DIGESTS_BY_STRENGTH:
            if k in supported:
                return k
        return None
189 189
class digestchecker(object):
    """file handle wrapper that additionally checks content against a given
    size and digests.

    d = digestchecker(fh, size, {'md5': '...'})

    When multiple digests are given, all of them are validated.
    """

    def __init__(self, fh, size, digests):
        self._fh = fh
        self._size = size
        self._got = 0
        self._digests = dict(digests)
        self._digester = digester(self._digests.keys())

    def read(self, length=-1):
        # feed every chunk we hand out through the digesters and keep a
        # running byte count for the final size check
        data = self._fh.read(length)
        self._digester.update(data)
        self._got += len(data)
        return data

    def validate(self):
        # check the size first: a truncated stream would otherwise surface
        # as a confusing digest mismatch
        if self._got != self._size:
            raise Abort(_('size mismatch: expected %d, got %d') %
                        (self._size, self._got))
        for name, expected in self._digests.items():
            actual = self._digester[name]
            if expected != actual:
                # i18n: first parameter is a digest name
                raise Abort(_('%s mismatch: expected %s, got %s') %
                            (name, expected, actual))
221 221
222 222 try:
223 223 buffer = buffer
224 224 except NameError:
225 225 if sys.version_info[0] < 3:
226 226 def buffer(sliceable, offset=0):
227 227 return sliceable[offset:]
228 228 else:
229 229 def buffer(sliceable, offset=0):
230 230 return memoryview(sliceable)[offset:]
231 231
232 232 import subprocess
233 233 closefds = os.name == 'posix'
234 234
def unpacker(fmt):
    """create a struct unpacker for the specified format"""
    compiled = getattr(struct, 'Struct', None)
    if compiled is not None:
        # Python 2.5+: compile the format string once
        return compiled(fmt).unpack
    # Python 2.4 fallback: the format is re-parsed on every call
    return lambda data: struct.unpack(fmt, data)
243 243
def popen2(cmd, env=None, newlines=False):
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout
254 254
def popen3(cmd, env=None, newlines=False):
    """Like popen4, but without exposing the Popen object itself."""
    return popen4(cmd, env, newlines)[:3]
258 258
def popen4(cmd, env=None, newlines=False):
    # bufsize=-1 lets the system pick the buffer size; the default of 0
    # (unbuffered) performs poorly on Mac OS X
    # (http://bugs.python.org/issue4194)
    proc = subprocess.Popen(cmd, shell=True, bufsize=-1,
                            close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE,
                            stderr=subprocess.PIPE,
                            universal_newlines=newlines,
                            env=env)
    return proc.stdin, proc.stdout, proc.stderr, proc
267 267
def version():
    """Return version information if available."""
    # the __version__ module is generated at build time; a source
    # checkout that was never built does not have it
    try:
        import __version__
    except ImportError:
        return 'unknown'
    return __version__.version
275 275
276 276 # used by parsedate
277 277 defaultdateformats = (
278 278 '%Y-%m-%d %H:%M:%S',
279 279 '%Y-%m-%d %I:%M:%S%p',
280 280 '%Y-%m-%d %H:%M',
281 281 '%Y-%m-%d %I:%M%p',
282 282 '%Y-%m-%d',
283 283 '%m-%d',
284 284 '%m/%d',
285 285 '%m/%d/%y',
286 286 '%m/%d/%Y',
287 287 '%a %b %d %H:%M:%S %Y',
288 288 '%a %b %d %I:%M:%S%p %Y',
289 289 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
290 290 '%b %d %H:%M:%S %Y',
291 291 '%b %d %I:%M:%S%p %Y',
292 292 '%b %d %H:%M:%S',
293 293 '%b %d %I:%M:%S%p',
294 294 '%b %d %H:%M',
295 295 '%b %d %I:%M%p',
296 296 '%b %d %Y',
297 297 '%b %d',
298 298 '%H:%M:%S',
299 299 '%I:%M:%S%p',
300 300 '%H:%M',
301 301 '%I:%M%p',
302 302 )
303 303
304 304 extendeddateformats = defaultdateformats + (
305 305 "%Y",
306 306 "%Y-%m",
307 307 "%b",
308 308 "%b %Y",
309 309 )
310 310
def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keywords args
    # NOTE: inspects the Python 2-only func_code attribute to pick the
    # cheapest wrapper for the function's arity. The cache is unbounded,
    # so only use this for small key spaces.
    if func.func_code.co_argcount == 0:
        # nullary function: a one-item list is enough to hold the result
        cache = []
        def f():
            if len(cache) == 0:
                cache.append(func())
            return cache[0]
        return f
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f
336 336
class sortdict(dict):
    '''a simple sorted dictionary

    Keys iterate in insertion order; re-setting an existing key moves it
    to the end of that order.
    '''
    def __init__(self, data=None):
        self._list = []
        if data:
            self.update(data)
    def copy(self):
        return sortdict(self)
    def __setitem__(self, key, val):
        # re-inserting an existing key moves it to the end
        if key in self:
            self._list.remove(key)
        self._list.append(key)
        dict.__setitem__(self, key, val)
    def __iter__(self):
        return self._list.__iter__()
    def update(self, src):
        if isinstance(src, dict):
            src = src.iteritems()
        for k, v in src:
            self[k] = v
    def clear(self):
        dict.clear(self)
        self._list = []
    def items(self):
        return [(k, self[k]) for k in self._list]
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        self._list.remove(key)
    def pop(self, key, *args, **kwargs):
        # fix: return the popped value (or the supplied default); the
        # previous code discarded it and always returned None, breaking
        # the dict.pop() contract
        value = dict.pop(self, key, *args, **kwargs)
        try:
            self._list.remove(key)
        except ValueError:
            # key was absent and a default was returned above
            pass
        return value
    def keys(self):
        return self._list
    def iterkeys(self):
        return self._list.__iter__()
    def iteritems(self):
        for k in self._list:
            yield k, self[k]
    def insert(self, index, key, val):
        self._list.insert(index, key)
        dict.__setitem__(self, key, val)
381 381
class lrucachedict(object):
    '''cache most recent gets from or sets to this dictionary'''

    def __init__(self, maxsize):
        self._cache = {}
        self._maxsize = maxsize
        # least-recently-used key sits at the left end of the deque
        self._order = collections.deque()

    def __getitem__(self, key):
        # a hit refreshes the key's position in the recency order;
        # a miss raises KeyError before the order is touched
        val = self._cache[key]
        self._order.remove(key)
        self._order.append(key)
        return val

    def __setitem__(self, key, value):
        if key in self._cache:
            self._order.remove(key)
        elif len(self._cache) >= self._maxsize:
            # room is needed: drop the least recently used entry
            del self._cache[self._order.popleft()]
        self._cache[key] = value
        self._order.append(key)

    def __contains__(self, key):
        return key in self._cache

    def clear(self):
        self._cache.clear()
        self._order = collections.deque()
410 410
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    # Bounded memoization: at most ~20 entries, evicting the least
    # recently used key. Relies on the Python 2-only func_code attribute
    # to pick the cheaper single-argument wrapper when possible.
    cache = {}
    order = collections.deque()
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[arg] = func(arg)
            else:
                # hit: refresh recency by moving the key to the right end
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.popleft()]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f
437 437
class propertycache(object):
    """Non-data descriptor that computes a value once per instance.

    The computed value is stored in the instance __dict__ under the
    wrapped function's name, shadowing this descriptor on subsequent
    lookups, so the function runs at most once per object.
    """
    def __init__(self, func):
        self.func = func
        self.name = func.__name__

    def __get__(self, obj, type=None):
        value = self.func(obj)
        self.cachevalue(obj, value)
        return value

    def cachevalue(self, obj, value):
        # __dict__ assignment required to bypass __setattr__ (eg: repoview)
        obj.__dict__[self.name] = value
450 450
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                            stdin=subprocess.PIPE,
                            stdout=subprocess.PIPE)
    # stderr is not captured; it goes wherever ours goes
    out, _err = proc.communicate(s)
    return out
457 457
def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        # OpenVMS encodes success in the low bit of the exit status
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explainexit(code)))
        fp = open(outname, 'rb')
        r = fp.read()
        fp.close()
        return r
    finally:
        # best-effort cleanup of both temp files; each is unlinked
        # independently so one failure does not leak the other
        try:
            if inname:
                os.unlink(inname)
        except OSError:
            pass
        try:
            if outname:
                os.unlink(outname)
        except OSError:
            pass
494 494
495 495 filtertable = {
496 496 'tempfile:': tempfilter,
497 497 'pipe:': pipefilter,
498 498 }
499 499
def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    # dispatch on a recognized scheme prefix ('tempfile:', 'pipe:');
    # anything else is treated as a plain shell pipe
    for prefix, fn in filtertable.iteritems():
        if cmd.startswith(prefix):
            rest = cmd[len(prefix):].lstrip()
            return fn(s, rest)
    return pipefilter(s, cmd)
506 506
def binary(s):
    """return true if a string is binary data"""
    # a NUL byte anywhere in a non-empty string marks it as binary
    if not s:
        return False
    return '\0' in s
510 510
def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    pending = []
    pendingsize = 0
    for chunk in source:
        pending.append(chunk)
        pendingsize += len(chunk)
        if pendingsize < min:
            continue
        if min < max:
            # grow the threshold: at least double it, and jump straight
            # to the largest power of two not exceeding the bytes just
            # gathered, capped at max
            min = min << 1
            nmin = 1 << log2(pendingsize)
            if nmin > min:
                min = nmin
            if min > max:
                min = max
        yield ''.join(pending)
        pending = []
        pendingsize = 0
    if pending:
        # whatever is left at the end is emitted even if undersized
        yield ''.join(pending)
541 541
542 542 Abort = error.Abort
543 543
def always(fn):
    """matcher predicate that accepts every filename"""
    return True
546 546
def never(fn):
    """matcher predicate that rejects every filename"""
    return False
549 549
def nogc(func):
    """disable garbage collector

    Python's garbage collector triggers a GC each time a certain number of
    container objects (the number being defined by gc.get_threshold()) are
    allocated even when marked not to be tracked by the collector. Tracking has
    no effect on when GCs are triggered, only on what objects the GC looks
    into. As a workaround, disable GC while building complex (huge)
    containers.

    This garbage collector issue have been fixed in 2.7.
    """
    def wrapper(*args, **kwargs):
        wasenabled = gc.isenabled()
        gc.disable()
        try:
            return func(*args, **kwargs)
        finally:
            # only re-enable if the collector was running when we started
            if wasenabled:
                gc.enable()
    return wrapper
571 571
def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        # different drive letters (Windows): no relative path exists,
        # so anchor n2 at root instead
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    # strip the common prefix, then climb out of what remains of n1
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'
597 597
def mainfrozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    # each freezing tool leaves a different marker behind
    if safehasattr(sys, "frozen"):      # new py2exe
        return True
    if safehasattr(sys, "importers"):   # old py2exe
        return True
    return imp.is_frozen("__main__")    # tools/freeze
607 607
608 608 # the location of data files matching the source code
609 609 if mainfrozen():
610 610 # executable version (py2exe) doesn't support __file__
611 611 datapath = os.path.dirname(sys.executable)
612 612 else:
613 613 datapath = os.path.dirname(__file__)
614 614
615 615 i18n.setdatapath(datapath)
616 616
617 617 _hgexecutable = None
618 618
def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    # resolved lazily on first call and cached in the module-level
    # _hgexecutable (set via _sethgexecutable)
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        mainmod = sys.modules['__main__']
        if hg:
            _sethgexecutable(hg)
        elif mainfrozen():
            # frozen binary: the interpreter itself is the hg executable
            _sethgexecutable(sys.executable)
        elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
            _sethgexecutable(mainmod.__file__)
        else:
            exe = findexe('hg') or os.path.basename(sys.argv[0])
            _sethgexecutable(exe)
    return _hgexecutable
637 637
def _sethgexecutable(path):
    """set location of the 'hg' executable"""
    # module-level cache read back by hgexecutable()
    global _hgexecutable
    _hgexecutable = path
642 642
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status, else raise onerr
    object as exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    # flush our stdout so the child's output is not interleaved oddly
    try:
        sys.stdout.flush()
    except Exception:
        pass
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    cmd = quotecommand(cmd)
    if sys.platform == 'plan9' and (sys.version_info[0] == 2
                                    and sys.version_info[1] < 7):
        # subprocess kludge to work around issues in half-baked Python
        # ports, notably bichued/python:
        if not cwd is None:
            os.chdir(cwd)
        rc = os.system(cmd)
    else:
        env = dict(os.environ)
        env.update((k, py2shell(v)) for k, v in environ.iteritems())
        env['HG'] = hgexecutable()
        if out is None or out == sys.__stdout__:
            # child inherits our stdout/stderr
            rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                                 env=env, cwd=cwd)
        else:
            # forward child output to 'out' line by line so callers can
            # display progress as it happens
            proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                    env=env, cwd=cwd, stdout=subprocess.PIPE,
                                    stderr=subprocess.STDOUT)
            while True:
                line = proc.stdout.readline()
                if not line:
                    break
                out.write(line)
            proc.wait()
            rc = proc.returncode
    # OpenVMS encodes success in the low bit of the exit status
    if sys.platform == 'OpenVMS' and rc & 1:
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explainexit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        raise onerr(errmsg)
    return rc
699 699
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def wrapped(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            tb = sys.exc_info()[2]
            # a traceback depth of one means the call frame itself failed
            # to bind the arguments; deeper frames mean func raised its
            # own TypeError, which must propagate unchanged
            if len(traceback.extract_tb(tb)) == 1:
                raise error.SignatureError
            raise

    return wrapped
711 711
def copyfile(src, dest, hardlink=False):
    "copy a file, preserving mode and atime/mtime"
    if os.path.lexists(dest):
        unlink(dest)
    # hardlinks are problematic on CIFS, quietly ignore this flag
    # until we find a way to work around it cleanly (issue4546)
    if False and hardlink:
        try:
            oslink(src, dest)
            return
        except (IOError, OSError):
            pass # fall back to normal copy
    if os.path.islink(src):
        # recreate the symlink rather than following it
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copymode(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))
732 732
def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
    """Copy a directory tree using hardlinks if possible."""
    num = 0

    if hardlink is None:
        # auto-detect: hardlinks only work within a single filesystem
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)
    if hardlink:
        topic = _('linking')
    else:
        topic = _('copying')

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            def nprog(t, pos):
                # offset nested progress by the files already handled
                if pos is not None:
                    return progress(t, pos + num)
            hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
            num += n
    else:
        if hardlink:
            try:
                oslink(src, dst)
            except (IOError, OSError):
                # once a hardlink fails, fall back to plain copies for
                # the remainder of the tree
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1
        progress(topic, num)
    progress(topic, None)

    return hardlink, num
769 769
770 770 _winreservednames = '''con prn aux nul
771 771 com1 com2 com3 com4 com5 com6 com7 com8 com9
772 772 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
773 773 _winreservedchars = ':*?"<>|'
def checkwinfilename(path):
    r'''Check that the base-relative path is a valid filename on Windows.
    Returns None if the path is ok, or a UI string describing the problem.

    >>> checkwinfilename("just/a/normal/path")
    >>> checkwinfilename("foo/bar/con.xml")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/con.xml/bar")
    "filename contains 'con', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/xml.con")
    >>> checkwinfilename("foo/bar/AUX/bla.txt")
    "filename contains 'AUX', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/bla:.txt")
    "filename contains ':', which is reserved on Windows"
    >>> checkwinfilename("foo/bar/b\07la.txt")
    "filename contains '\\x07', which is invalid on Windows"
    >>> checkwinfilename("foo/bar/bla ")
    "filename ends with ' ', which is not allowed on Windows"
    >>> checkwinfilename("../bar")
    >>> checkwinfilename("foo\\")
    "filename ends with '\\', which is invalid on Windows"
    >>> checkwinfilename("foo\\/bar")
    "directory name ends with '\\', which is invalid on Windows"
    '''
    if path.endswith('\\'):
        return _("filename ends with '\\', which is invalid on Windows")
    if '\\/' in path:
        return _("directory name ends with '\\', which is invalid on Windows")
    for n in path.replace('\\', '/').split('/'):
        if not n:
            continue
        for c in n:
            # characters Windows filesystems outlaw entirely
            if c in _winreservedchars:
                return _("filename contains '%s', which is reserved "
                         "on Windows") % c
            # control characters (codes 0-31) are invalid in filenames
            if ord(c) <= 31:
                return _("filename contains %r, which is invalid "
                         "on Windows") % c
        # reserved device names (con, nul, com1, ...) apply to the part
        # before the first dot, case-insensitively
        base = n.split('.')[0]
        if base and base.lower() in _winreservednames:
            return _("filename contains '%s', which is reserved "
                     "on Windows") % base
        t = n[-1]
        # the special directory entries '.' and '..' are exempt from the
        # trailing-dot/space check; previously this was spelled with the
        # accident-prone substring test "n not in '..'"
        if t in '. ' and n not in ('.', '..'):
            return _("filename ends with '%s', which is not allowed "
                     "on Windows") % t
820 820
821 821 if os.name == 'nt':
822 822 checkosfilename = checkwinfilename
823 823 else:
824 824 checkosfilename = platform.checkosfilename
825 825
def makelock(info, pathname):
    # Preferred form: store the lock info in a symlink, which is created
    # atomically and carries the data in its target.
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        # an existing lock must be reported; other errors fall through to
        # the regular-file fallback below
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    # Fallback for platforms without symlinks: an exclusively-created
    # regular file containing the info (O_EXCL gives the atomicity).
    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)
838 838
def readlock(pathname):
    # Locks are normally symlinks (see makelock); read the target.
    try:
        return os.readlink(pathname)
    except OSError, why:
        # EINVAL: not a symlink, so it must be the regular-file fallback
        # format; ENOSYS: platform lacks readlink entirely
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    fp = posixfile(pathname)
    r = fp.read()
    fp.close()
    return r
851 851
def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        # pseudo-file object without fileno(): fall back to stat by name
        return os.stat(fp.name)
858 858
859 859 # File system features
860 860
def checkcase(path):
    """
    Return true if the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    st1 = os.lstat(path)
    dirname, base = os.path.split(path)
    folded = base.upper()
    if folded == base:
        folded = base.lower()
    if folded == base:
        # no foldable characters: no evidence against case sensitivity
        return True
    other = os.path.join(dirname, folded)
    try:
        st2 = os.lstat(other)
    except OSError:
        # the case-folded variant does not exist: case-sensitive
        return True
    # identical stat means both spellings name the same file,
    # i.e. the filesystem is case-insensitive
    return st2 != st1
883 883
884 884 try:
885 885 import re2
886 886 _re2 = None
887 887 except ImportError:
888 888 _re2 = False
889 889
class _re(object):
    # Facade that prefers the (optional, faster) re2 engine and falls back
    # to the stdlib re module ('remod') when re2 is missing or the pattern
    # or flags are not re2-compatible.
    def _checkre2(self):
        # probe the re2 binding once and cache the verdict in the
        # module-level _re2 flag
        global _re2
        try:
            # check if match works, see issue3964
            _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
        except ImportError:
            _re2 = False

    def compile(self, pat, flags=0):
        '''Compile a regular expression, using re2 if possible

        For best performance, use only re2-compatible regexp features. The
        only flags from the re module that are re2-compatible are
        IGNORECASE and MULTILINE.'''
        if _re2 is None:
            self._checkre2()
        if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
            # re2 takes flags inline rather than as an argument
            if flags & remod.IGNORECASE:
                pat = '(?i)' + pat
            if flags & remod.MULTILINE:
                pat = '(?m)' + pat
            try:
                return re2.compile(pat)
            except re2.error:
                # pattern uses a feature re2 does not support; fall back
                pass
        return remod.compile(pat, flags)

    @propertycache
    def escape(self):
        '''Return the version of escape corresponding to self.compile.

        This is imperfect because whether re2 or re is used for a particular
        function depends on the flags, etc, but it's the best we can do.
        '''
        global _re2
        if _re2 is None:
            self._checkre2()
        if _re2:
            return re2.escape
        else:
            return remod.escape
934 934
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name should be relative to root, and be normcase-ed for efficiency.

    Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    The root should be normcase-ed, too.
    '''
    def _makefspathcacheentry(dir):
        return dict((normcase(n), n) for n in os.listdir(dir))

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    # fix: str.replace returns a new string; the previous code discarded
    # the result, so on Windows the '\' was never escaped and the
    # character class below degenerated to splitting on '/' only.
    seps = seps.replace('\\', '\\\\')
    pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            # separator runs are passed through unchanged
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = _makefspathcacheentry(dir)
        contents = _fspathcache[dir]

        found = contents.get(part)
        if not found:
            # retry "once per directory" per "dirstate.walk" which
            # may take place for each patches of "hg qpush", for example
            _fspathcache[dir] = contents = _makefspathcacheentry(dir)
            found = contents.get(part)

        result.append(found or part)
        dir = os.path.join(dir, part)

    return ''.join(result)
977 977
def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        oslink(f1, f2)
        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = posixfile(f2)
        return nlinks(f2) > 1
    except OSError:
        return False
    finally:
        # best-effort removal of both scratch files
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass
1009 1009
def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    if path.endswith(os.sep):
        return True
    # fall back to the platform's alternate separator, when one exists
    return os.altsep and path.endswith(os.altsep)
1013 1013
def splitpath(path):
    """Break *path* into its components on os.sep.

    os.altsep is intentionally not handled -- this is just an explicit
    spelling of ``path.split(os.sep)``.  Run os.path.normpath() on the
    input first if normalization is needed.
    """
    return path.split(os.sep)
1021 1021
def gui():
    '''Are we running in a GUI?

    Truthy when a graphical display is believed to be available:
    always on native Windows, DISPLAY-dependent elsewhere, and
    CoreGraphics-session-dependent on OS X (but never over SSH).
    '''
    if sys.platform == 'darwin':
        if 'SSH_CONNECTION' in os.environ:
            # handle SSH access to a box where the user is logged in
            return False
        elif getattr(osutil, 'isgui', None):
            # check if a CoreGraphics session is available
            return osutil.isgui()
        else:
            # pure build; use a safe default
            return True
    else:
        return os.name == "nt" or os.environ.get("DISPLAY")
1036 1036
def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    copymode(name, temp, createmode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                # no original to copy: the fresh (empty) temp file is fine
                return temp
            if not getattr(inst, 'filename', None):
                # annotate the exception with the file name for reporting
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except: # re-raises
        # don't leave a stale temporary file behind on failure
        try: os.unlink(temp)
        except OSError: pass
        raise
    return temp
1075 1075
class atomictempfile(object):
    '''writable file object that atomically updates a file

    All writes will go to a temporary copy of the original file. Call
    close() when you are done writing, and atomictempfile will rename
    the temporary copy to the original name, making the changes
    visible. If the object is destroyed without being closed, all your
    writes are discarded.
    '''
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name # permanent name
        # write into a same-directory temp copy; 'w' mode means the
        # caller will rewrite everything, so skip copying the contents
        self._tempname = mktempcopy(name, emptyok=('w' in mode),
                                    createmode=createmode)
        self._fp = posixfile(self._tempname, mode)

        # delegated methods
        self.write = self._fp.write
        self.seek = self._fp.seek
        self.tell = self._fp.tell
        self.fileno = self._fp.fileno

    def close(self):
        # commit: flush the temp copy and move it into place
        if not self._fp.closed:
            self._fp.close()
            rename(self._tempname, localpath(self.__name))

    def discard(self):
        # abort: delete the temp copy, leaving the original untouched
        if not self._fp.closed:
            try:
                os.unlink(self._tempname)
            except OSError:
                pass
            self._fp.close()

    def __del__(self):
        if safehasattr(self, '_fp'): # constructor actually did something
            self.discard()
1113 1113
def makedirs(name, mode=None, notindexed=False):
    """recursive directory creation with parent mode inheritance

    An already existing leaf directory is not an error.  ``mode``
    (when given) is applied with os.chmod() after creation rather than
    being filtered through the umask.
    """
    try:
        makedir(name, notindexed)
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT or not name:
            raise
        parent = os.path.dirname(os.path.abspath(name))
        if parent == name:
            # reached the filesystem root without success: give up
            raise
        # create missing ancestors first, then retry the leaf
        makedirs(parent, mode, notindexed)
        makedir(name, notindexed)
    if mode is not None:
        os.chmod(name, mode)
1130 1130
def ensuredirs(name, mode=None, notindexed=False):
    """race-safe recursive directory creation

    Newly created directories are marked as "not to be indexed by
    the content indexing service", if ``notindexed`` is specified
    for "write" mode access.
    """
    if os.path.isdir(name):
        return
    # recurse to guarantee every ancestor exists before the leaf
    parent = os.path.dirname(os.path.abspath(name))
    if parent != name:
        ensuredirs(parent, mode, notindexed)
    try:
        makedir(name, notindexed)
    except OSError, err:
        if err.errno == errno.EEXIST and os.path.isdir(name):
            # someone else seems to have won a directory creation race
            return
        raise
    if mode is not None:
        os.chmod(name, mode)
1152 1152
def readfile(path):
    """Return the entire contents of the file at *path* (binary mode)."""
    with open(path, 'rb') as fp:
        return fp.read()
1159 1159
def writefile(path, text):
    """Replace the contents of *path* with *text* (binary mode)."""
    with open(path, 'wb') as fp:
        fp.write(text)
1166 1166
def appendfile(path, text):
    """Append *text* to the file at *path*, creating it if missing."""
    with open(path, 'ab') as fp:
        fp.write(text)
1173 1173
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            # re-chunk anything over 1MB into 256KB pieces so no single
            # queued entry is excessively large
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        # chunks pulled from self.iter but not yet handed to the caller
        self._queue = collections.deque()

    def read(self, l=None):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry.

        If size parameter is omitted, read everything"""
        left = l
        buf = []
        queue = self._queue
        while left is None or left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    # underlying iterator exhausted: return what we have
                    break

            chunk = queue.popleft()
            if left is not None:
                left -= len(chunk)
            if left is not None and left < 0:
                # took too much: push the unread tail back on the queue
                queue.appendleft(chunk[left:])
                buf.append(chunk[:left])
            else:
                buf.append(chunk)

        return ''.join(buf)
1224 1224
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        # never request more than the remaining limit allows
        nbytes = size if limit is None else min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s
1245 1245
def makedate(timestamp=None):
    '''Return a unix timestamp (or the current time) as a (unixtime,
    offset) tuple based off the local timezone.

    The offset is the local timezone's distance from UTC in seconds.
    Raises Abort for negative timestamps (usually a broken clock).
    '''
    when = time.time() if timestamp is None else timestamp
    if when < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % when, hint=hint)
    delta = (datetime.datetime.utcfromtimestamp(when)
             - datetime.datetime.fromtimestamp(when))
    offset = delta.days * 86400 + delta.seconds
    return when, offset
1258 1258
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """Render a (unixtime, offset) pair as a localized time string.

    unixtime is seconds since the epoch; offset is the timezone's
    distance from UTC in seconds.  ``%1``/``%2`` (or ``%z``) in
    *format* expand to the zone as ``+HH``/``MM``.  When *date* is
    None the current local time is used.
    """
    t, tz = date or makedate()
    if t < 0:
        # time.gmtime(lt) fails on Windows for lt < -43200, so clamp
        t = 0
        tz = 0
    if "%1" in format or "%2" in format or "%z" in format:
        sign = "-" if tz > 0 else "+"
        minutes = abs(tz) // 60
        format = format.replace("%z", "%1%2")
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    try:
        expanded = time.gmtime(float(t) - tz)
    except ValueError:
        # time was out of range
        expanded = time.gmtime(sys.maxint)
    return time.strftime(format, expanded)
1281 1281
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into iso 8631 date.

    e.g. '2015-04-01'; None means the current time.
    """
    return datestr(date, format='%Y-%m-%d')
1285 1285
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised.

    ``defaults`` maps the date-part keys ("S", "M", "HI", "d", "mb",
    "yY") to (bias, now) string pairs used to fill in parts that are
    missing from ``format`` -- see parsedate().
    """
    # NOTE(review): mutable default argument; it is only read here, and
    # real callers pass the defaults dict built by parsedate().
    def timezone(string):
        # trailing "+HHMM"/"-HHMM" -> offset in seconds (negated, since
        # unixtime = localunixtime + offset); "GMT"/"UTC" -> 0; else None
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            # append the default value (and a matching directive) so
            # strptime sees a complete date
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset
1326 1326
def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.

    >>> parsedate(' today ') == parsedate(\
                                  datetime.date.today().strftime('%b %d'))
    True
    >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
                                               datetime.timedelta(days=1)\
                                              ).strftime('%b %d'))
    True
    >>> now, tz = makedate()
    >>> strnow, strtz = parsedate('now')
    >>> (strnow - now) < 1
    True
    >>> tz == strtz
    True
    """
    # NOTE(review): ``bias`` is a mutable default argument, but it is
    # only read (bias.get), never mutated.
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()

    if date == 'now' or date == _('now'):
        return makedate()
    if date == 'today' or date == _('today'):
        date = datetime.date.today().strftime('%b %d')
    elif date == 'yesterday' or date == _('yesterday'):
        date = (datetime.date.today() -
                datetime.timedelta(days=1)).strftime('%b %d')

    try:
        # fast path: "unixtime offset" pair of integers
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        # try each candidate format until one parses
        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
1403 1403
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        # earliest timestamp a partial date can denote: bias missing
        # parts toward their minimum values
        d = {'mb': "1", 'd': "1"}
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        # latest timestamp a partial date can denote: bias missing
        # parts toward their maximum; try month lengths 31..29 before
        # settling on 28
        d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        # "-N": within the last N days
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_('%s must be nonnegative (see "hg help dates")')
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        # "A to B": inclusive range between the two specs
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        # bare date: match anywhere within its span of accuracy
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
1479 1479
def shortuser(user):
    """Return a short representation of a user name or email address."""
    # strip an @domain suffix, then anything up to and including the
    # first '<', then anything after the first space or dot
    user = user.split('@', 1)[0]
    user = user.split('<', 1)[-1]
    user = user.split(' ', 1)[0]
    user = user.split('.', 1)[0]
    return user
1495 1495
def emailuser(user):
    """Return the user portion of an email address."""
    # drop the @domain part, then anything up to and including '<'
    user = user.split('@', 1)[0]
    return user.split('<', 1)[-1]
1505 1505
def email(author):
    '''get email of author.'''
    # take the span between '<' and '>', tolerating either or both
    # brackets being absent
    start = author.find('<') + 1
    end = author.find('>')
    if end == -1:
        return author[start:]
    return author[start:end]
1512 1512
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) columns in display.

    Column counting and trimming are delegated to encoding.trim(),
    called with '...' as the truncation marker.
    """
    return encoding.trim(text, maxlength, ellipsis='...')
1516 1516
def unitcountfn(*unittable):
    '''return a function that renders a readable count of some quantity

    Each row of *unittable* is (multiplier, divisor, format); the first
    row where count >= divisor * multiplier is used, and the last row's
    format is the fallback.
    '''
    def render(count):
        for multiplier, divisor, fmt in unittable:
            if count >= divisor * multiplier:
                return fmt % (count / float(divisor))
        # nothing matched: format the raw count with the final row
        return unittable[-1][2] % count

    return render
1527 1527
# bytecount(n) -> human-readable size string, e.g. '1.25 MB'.  Rows are
# ordered largest-unit first; precision is chosen to keep roughly three
# significant digits.
bytecount = unitcountfn(
    (100, 1 << 30, _('%.0f GB')),
    (10, 1 << 30, _('%.1f GB')),
    (1, 1 << 30, _('%.2f GB')),
    (100, 1 << 20, _('%.0f MB')),
    (10, 1 << 20, _('%.1f MB')),
    (1, 1 << 20, _('%.2f MB')),
    (100, 1 << 10, _('%.0f KB')),
    (10, 1 << 10, _('%.1f KB')),
    (1, 1 << 10, _('%.2f KB')),
    (1, 1, _('%.0f bytes')),
    )
1540 1540
def uirepr(s):
    """repr(), with doubled (Windows path) backslashes collapsed to one."""
    # Avoid double backslash in Windows path repr()
    return '\\'.join(repr(s).split('\\\\'))
1544 1544
# delay import of textwrap
def MBTextWrapper(**kwargs):
    """Factory for a width-aware TextWrapper subclass.

    On first call it defines the class, replaces this factory with the
    class itself (see the ``global`` at the bottom), and returns an
    instance; later calls construct instances directly.
    """
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither number of 'bytes' in any encoding nor 'characters' is
        appropriate to calculate terminal columns for specified string.

        Original TextWrapper implementation uses built-in 'len()' directly,
        so overriding is needed to use width information of each characters.

        In addition, characters classified into 'ambiguous' width are
        treated as wide in East Asian area, but as narrow in other.

        This requires use decision to determine width of such characters.
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

            # for compatibility between 2.4 and 2.6
            if getattr(self, 'drop_whitespace', None) is None:
                self.drop_whitespace = kwargs.get('drop_whitespace', True)

        def _cutdown(self, ucstr, space_left):
            # split ucstr at the largest prefix that still fits within
            # space_left display columns
            l = 0
            colwidth = encoding.ucolwidth
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i])
                if space_left < l:
                    return (ucstr[:i], ucstr[i:])
            return ucstr, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

        # this overriding code is imported from TextWrapper of python 2.6
        # to calculate columns of string by 'encoding.ucolwidth()'
        def _wrap_chunks(self, chunks):
            colwidth = encoding.ucolwidth

            lines = []
            if self.width <= 0:
                raise ValueError("invalid width %r (must be > 0)" % self.width)

            # Arrange in reverse order so items can be efficiently popped
            # from a stack of chucks.
            chunks.reverse()

            while chunks:

                # Start the list of chunks that will make up the current line.
                # cur_len is just the length of all the chunks in cur_line.
                cur_line = []
                cur_len = 0

                # Figure out which static string will prefix this line.
                if lines:
                    indent = self.subsequent_indent
                else:
                    indent = self.initial_indent

                # Maximum width for this line.
                width = self.width - len(indent)

                # First chunk on line is whitespace -- drop it, unless this
                # is the very beginning of the text (i.e. no lines started yet).
                if self.drop_whitespace and chunks[-1].strip() == '' and lines:
                    del chunks[-1]

                while chunks:
                    l = colwidth(chunks[-1])

                    # Can at least squeeze this chunk onto the current line.
                    if cur_len + l <= width:
                        cur_line.append(chunks.pop())
                        cur_len += l

                    # Nope, this line is full.
                    else:
                        break

                # The current line is full, and the next chunk is too big to
                # fit on *any* line (not just this one).
                if chunks and colwidth(chunks[-1]) > width:
                    self._handle_long_word(chunks, cur_line, cur_len, width)

                # If the last chunk on this line is all whitespace, drop it.
                if (self.drop_whitespace and
                    cur_line and cur_line[-1].strip() == ''):
                    del cur_line[-1]

                # Convert current line back to a string and store it in list
                # of all lines (return value).
                if cur_line:
                    lines.append(indent + ''.join(cur_line))

            return lines

    # memoize: replace the factory with the class so later calls skip
    # the class definition entirely
    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)
1655 1655
def wrap(line, width, initindent='', hangindent=''):
    """Word-wrap *line* to *width* display columns.

    line, initindent and hangindent are byte strings in the local
    encoding; they are decoded for width-aware wrapping and the result
    is re-encoded.  initindent prefixes the first output line,
    hangindent the following ones.
    """
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(encoding.encoding, encoding.encodingmode)
    initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
    hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(encoding.encoding)
1668 1668
def iterlines(iterator):
    """Flatten an iterable of text chunks into individual lines."""
    for piece in iterator:
        # splitlines() drops trailing newlines and handles \r\n too
        for ln in piece.splitlines():
            yield ln
1673 1673
def expandpath(path):
    """Expand $VAR and ~user constructs in *path* (variables first)."""
    return os.path.expanduser(os.path.expandvars(path))
1676 1676
def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    return [sys.executable] if mainfrozen() else gethgcmd()
1687 1687
def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        # reap the dead child and record its (pid, status)
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            # re-check condfn() after noticing the child died, to close
            # the race between its success and its termination
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        # always restore the previous SIGCHLD handler
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
1722 1722
# any() and all() are builtins since Python 2.5, which is older than
# this codebase's minimum supported Python; the NameError-guarded
# fallback definitions that used to live here were dead code and have
# been removed so callers always get the C implementations.
def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        # NOTE(review): this mutates the caller's mapping in place so the
        # doubled prefix expands to the bare prefix character
        mapping[prefix_char] = prefix_char
    # match the prefix followed by any known key, and substitute the
    # (optionally transformed) mapped value
    r = remod.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1762 1747
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        # not numeric: fall through to a service-name lookup
        try:
            return socket.getservbyname(port)
        except socket.error:
            raise Abort(_("no port number associated with service '%s'")
                        % port)
1779 1764
# lower-cased spellings accepted by parsebool() and their values
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}
1783 1768
def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.  Matching is
    case-insensitive against the spellings listed in _booleans.
    """
    return _booleans.get(s.lower(), None)
1790 1775
_hexdig = '0123456789ABCDEFabcdef'
# every two-hex-digit string (any case) -> its byte, for _urlunquote()
_hextochr = dict((a + b, chr(int(a + b, 16)))
                 for a in _hexdig for b in _hexdig)
1794 1779
def _urlunquote(s):
    """Decode HTTP/HTML % encoding.

    >>> _urlunquote('abc%20def')
    'abc def'
    """
    res = s.split('%')
    # fastpath
    if len(res) == 1:
        return s
    s = res[0]
    for item in res[1:]:
        try:
            s += _hextochr[item[:2]] + item[2:]
        except KeyError:
            # not two valid hex digits: keep the '%' literally
            s += '%' + item
        except UnicodeDecodeError:
            # unicode input: decode the escape manually (unichr is
            # Python 2 only)
            s += unichr(int(item[:2], 16)) + item[2:]
    return s
1814 1799
1815 1800 class url(object):
1816 1801 r"""Reliable URL parser.
1817 1802
1818 1803 This parses URLs and provides attributes for the following
1819 1804 components:
1820 1805
1821 1806 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1822 1807
1823 1808 Missing components are set to None. The only exception is
1824 1809 fragment, which is set to '' if present but empty.
1825 1810
1826 1811 If parsefragment is False, fragment is included in query. If
1827 1812 parsequery is False, query is included in path. If both are
1828 1813 False, both fragment and query are included in path.
1829 1814
1830 1815 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1831 1816
1832 1817 Note that for backward compatibility reasons, bundle URLs do not
1833 1818 take host names. That means 'bundle://../' has a path of '../'.
1834 1819
1835 1820 Examples:
1836 1821
1837 1822 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1838 1823 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1839 1824 >>> url('ssh://[::1]:2200//home/joe/repo')
1840 1825 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1841 1826 >>> url('file:///home/joe/repo')
1842 1827 <url scheme: 'file', path: '/home/joe/repo'>
1843 1828 >>> url('file:///c:/temp/foo/')
1844 1829 <url scheme: 'file', path: 'c:/temp/foo/'>
1845 1830 >>> url('bundle:foo')
1846 1831 <url scheme: 'bundle', path: 'foo'>
1847 1832 >>> url('bundle://../foo')
1848 1833 <url scheme: 'bundle', path: '../foo'>
1849 1834 >>> url(r'c:\foo\bar')
1850 1835 <url path: 'c:\\foo\\bar'>
1851 1836 >>> url(r'\\blah\blah\blah')
1852 1837 <url path: '\\\\blah\\blah\\blah'>
1853 1838 >>> url(r'\\blah\blah\blah#baz')
1854 1839 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1855 1840 >>> url(r'file:///C:\users\me')
1856 1841 <url scheme: 'file', path: 'C:\\users\\me'>
1857 1842
1858 1843 Authentication credentials:
1859 1844
1860 1845 >>> url('ssh://joe:xyz@x/repo')
1861 1846 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1862 1847 >>> url('ssh://joe@x/repo')
1863 1848 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1864 1849
1865 1850 Query strings and fragments:
1866 1851
1867 1852 >>> url('http://host/a?b#c')
1868 1853 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1869 1854 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1870 1855 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1871 1856 """
1872 1857
1873 1858 _safechars = "!~*'()+"
1874 1859 _safepchars = "/!~*'()+:\\"
1875 1860 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
1876 1861
    def __init__(self, path, parsequery=True, parsefragment=True):
        """Parse *path* into URL components (see the class docstring).

        parsequery/parsefragment control whether a trailing '?query' or
        '#fragment' is split off or kept as literal path text.
        """
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)
            if not path:
                path = None

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        # split off a "scheme:" prefix, if present
        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        # a local path (no scheme) needs no further parsing
        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        # pull "user[:passwd]@" credentials out of the host
        if self.host and '@' in self.host:
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))
1970 1955
1971 1956 def __repr__(self):
1972 1957 attrs = []
1973 1958 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1974 1959 'query', 'fragment'):
1975 1960 v = getattr(self, a)
1976 1961 if v is not None:
1977 1962 attrs.append('%s: %r' % (a, v))
1978 1963 return '<url %s>' % ', '.join(attrs)
1979 1964
1980 1965 def __str__(self):
1981 1966 r"""Join the URL's components back into a URL string.
1982 1967
1983 1968 Examples:
1984 1969
1985 1970 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1986 1971 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1987 1972 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1988 1973 'http://user:pw@host:80/?foo=bar&baz=42'
1989 1974 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1990 1975 'http://user:pw@host:80/?foo=bar%3dbaz'
1991 1976 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1992 1977 'ssh://user:pw@[::1]:2200//home/joe#'
1993 1978 >>> str(url('http://localhost:80//'))
1994 1979 'http://localhost:80//'
1995 1980 >>> str(url('http://localhost:80/'))
1996 1981 'http://localhost:80/'
1997 1982 >>> str(url('http://localhost:80'))
1998 1983 'http://localhost:80/'
1999 1984 >>> str(url('bundle:foo'))
2000 1985 'bundle:foo'
2001 1986 >>> str(url('bundle://../foo'))
2002 1987 'bundle:../foo'
2003 1988 >>> str(url('path'))
2004 1989 'path'
2005 1990 >>> str(url('file:///tmp/foo/bar'))
2006 1991 'file:///tmp/foo/bar'
2007 1992 >>> str(url('file:///c:/tmp/foo/bar'))
2008 1993 'file:///c:/tmp/foo/bar'
2009 1994 >>> print url(r'bundle:foo\bar')
2010 1995 bundle:foo\bar
2011 1996 >>> print url(r'file:///D:\data\hg')
2012 1997 file:///D:\data\hg
2013 1998 """
2014 1999 if self._localpath:
2015 2000 s = self.path
2016 2001 if self.scheme == 'bundle':
2017 2002 s = 'bundle:' + s
2018 2003 if self.fragment:
2019 2004 s += '#' + self.fragment
2020 2005 return s
2021 2006
2022 2007 s = self.scheme + ':'
2023 2008 if self.user or self.passwd or self.host:
2024 2009 s += '//'
2025 2010 elif self.scheme and (not self.path or self.path.startswith('/')
2026 2011 or hasdriveletter(self.path)):
2027 2012 s += '//'
2028 2013 if hasdriveletter(self.path):
2029 2014 s += '/'
2030 2015 if self.user:
2031 2016 s += urllib.quote(self.user, safe=self._safechars)
2032 2017 if self.passwd:
2033 2018 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2034 2019 if self.user or self.passwd:
2035 2020 s += '@'
2036 2021 if self.host:
2037 2022 if not (self.host.startswith('[') and self.host.endswith(']')):
2038 2023 s += urllib.quote(self.host)
2039 2024 else:
2040 2025 s += self.host
2041 2026 if self.port:
2042 2027 s += ':' + urllib.quote(self.port)
2043 2028 if self.host:
2044 2029 s += '/'
2045 2030 if self.path:
2046 2031 # TODO: similar to the query string, we should not unescape the
2047 2032 # path when we store it, the path might contain '%2f' = '/',
2048 2033 # which we should *not* escape.
2049 2034 s += urllib.quote(self.path, safe=self._safepchars)
2050 2035 if self.query:
2051 2036 # we store the query in escaped form.
2052 2037 s += '?' + self.query
2053 2038 if self.fragment is not None:
2054 2039 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2055 2040 return s
2056 2041
2057 2042 def authinfo(self):
2058 2043 user, passwd = self.user, self.passwd
2059 2044 try:
2060 2045 self.user, self.passwd = None, None
2061 2046 s = str(self)
2062 2047 finally:
2063 2048 self.user, self.passwd = user, passwd
2064 2049 if not self.user:
2065 2050 return (s, None)
2066 2051 # authinfo[1] is passed to urllib2 password manager, and its
2067 2052 # URIs must not contain credentials. The host is passed in the
2068 2053 # URIs list because Python < 2.4.3 uses only that to search for
2069 2054 # a password.
2070 2055 return (s, (None, (s, self.host),
2071 2056 self.user, self.passwd or ''))
2072 2057
2073 2058 def isabs(self):
2074 2059 if self.scheme and self.scheme != 'file':
2075 2060 return True # remote URL
2076 2061 if hasdriveletter(self.path):
2077 2062 return True # absolute for our purposes - can't be joined()
2078 2063 if self.path.startswith(r'\\'):
2079 2064 return True # Windows UNC path
2080 2065 if self.path.startswith('/'):
2081 2066 return True # POSIX-style
2082 2067 return False
2083 2068
2084 2069 def localpath(self):
2085 2070 if self.scheme == 'file' or self.scheme == 'bundle':
2086 2071 path = self.path or '/'
2087 2072 # For Windows, we need to promote hosts containing drive
2088 2073 # letters to paths with drive letters.
2089 2074 if hasdriveletter(self._hostport):
2090 2075 path = self._hostport + '/' + self.path
2091 2076 elif (self.host is not None and self.path
2092 2077 and not hasdriveletter(path)):
2093 2078 path = '/' + path
2094 2079 return path
2095 2080 return self._origpath
2096 2081
2097 2082 def islocal(self):
2098 2083 '''whether localpath will return something that posixfile can open'''
2099 2084 return (not self.scheme or self.scheme == 'file'
2100 2085 or self.scheme == 'bundle')
2101 2086
def hasscheme(path):
    '''True if path parses as a URL with an explicit scheme.'''
    u = url(path)
    return bool(u.scheme)
2104 2089
def hasdriveletter(path):
    '''True if path begins with a Windows drive letter ("c:...").

    Falsy inputs (None, '') are returned unchanged, preserving the
    original short-circuit behaviour.
    '''
    if not path:
        return path
    return path[0:1].isalpha() and path[1:2] == ':'
2107 2092
def urllocalpath(path):
    '''Parse path as a URL (keeping '?' and '#' literal) and return
    its local filesystem form.'''
    u = url(path, parsequery=False, parsefragment=False)
    return u.localpath()
2110 2095
def hidepassword(u):
    '''hide user credential in a url string'''
    parsed = url(u)
    if parsed.passwd:
        parsed.passwd = '***'
    return str(parsed)
2117 2102
def removeauth(u):
    '''remove all authentication information from a url string'''
    parsed = url(u)
    parsed.user = parsed.passwd = None
    return str(parsed)
2123 2108
def isatty(fd):
    '''Return fd.isatty(); objects without an isatty() method count as
    non-terminals and yield False.'''
    try:
        result = fd.isatty()
    except AttributeError:
        return False
    return result
2129 2114
# timecount(seconds) -> human-readable duration string; built with the
# unitcountfn factory defined earlier in this module.  Each triple is
# (threshold, divisor, format): the first entry whose threshold fits the
# scaled value selects the unit, from seconds down to nanoseconds.
timecount = unitcountfn(
    (1, 1e3, _('%.0f s')),
    (100, 1, _('%.1f s')),
    (10, 1, _('%.2f s')),
    (1, 1, _('%.3f s')),
    (100, 0.001, _('%.1f ms')),
    (10, 0.001, _('%.2f ms')),
    (1, 0.001, _('%.3f ms')),
    (100, 0.000001, _('%.1f us')),
    (10, 0.000001, _('%.2f us')),
    (1, 0.000001, _('%.3f us')),
    (100, 0.000000001, _('%.1f ns')),
    (10, 0.000000001, _('%.2f ns')),
    (1, 0.000000001, _('%.3f ns')),
    )
2145 2130
# current indentation level for nested @timed reports (in spaces)
_timenesting = [0]

def timed(func):
    '''Report the execution time of a function call to stderr.

    During development, use as a decorator when you need to measure
    the cost of a function, e.g. as follows:

    @util.timed
    def foo(a, b, c):
        pass
    '''

    def wrapper(*args, **kwargs):
        start = time.time()
        step = 2
        # indent nested timed calls so the report reads like a tree
        _timenesting[0] += step
        try:
            return func(*args, **kwargs)
        finally:
            elapsed = time.time() - start
            _timenesting[0] -= step
            prefix = ' ' * _timenesting[0]
            sys.stderr.write('%s%s: %s\n'
                             % (prefix, func.__name__, timecount(elapsed)))
    return wrapper
2172 2157
# (suffix, multiplier) pairs; order matters: 'm' is tried before 'mb',
# but since 'mb' does not end in 'm' the longer suffixes still match
_sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
              ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))

def sizetoint(s):
    '''Convert a space specifier to a byte count.

    >>> sizetoint('30')
    30
    >>> sizetoint('2.2kb')
    2252
    >>> sizetoint('6M')
    6291456
    '''
    spec = s.strip().lower()
    try:
        for suffix, multiplier in _sizeunits:
            if spec.endswith(suffix):
                return int(float(spec[:-len(suffix)]) * multiplier)
        # no recognized suffix: a bare integer byte count
        return int(spec)
    except ValueError:
        raise error.ParseError(_("couldn't parse size: %s") % s)
2194 2179
class hooks(object):
    '''A collection of hook functions that can be used to extend a
    function's behaviour. Hooks are called in lexicographic order,
    based on the names of their sources.'''

    def __init__(self):
        # list of (source-name, callable) pairs
        self._hooks = []

    def add(self, source, hook):
        self._hooks.append((source, hook))

    def __call__(self, *args):
        # sort by source name for deterministic invocation order
        self._hooks.sort(key=lambda pair: pair[0])
        return [fn(*args) for _source, fn in self._hooks]
2212 2197
def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
    '''Writes a message to f (stderr) with a nicely formatted stacktrace.
    Skips the 'skip' last entries. By default it will flush stdout first.
    It can be used everywhere and do intentionally not require an ui object.
    Not be used in production code but very convenient while developing.
    '''
    if otherf:
        # flush the other stream first so interleaved output stays ordered
        otherf.flush()
    f.write('%s at:\n' % msg)
    # drop our own frame plus the 'skip' most recent callers
    frames = traceback.extract_stack()[:-skip - 1]
    entries = [('%s:%s' % (filename, lineno), funcname)
               for filename, lineno, funcname, _line in frames]
    if entries:
        # align the "in <func>" column across all entries
        width = max(len(location) for location, _func in entries)
        for location, funcname in entries:
            f.write(' %-*s in %s\n' % (width, location, funcname))
    f.flush()
2229 2214
class dirs(object):
    '''a multiset of directory names from a dirstate or manifest'''

    def __init__(self, map, skip=None):
        # reference count for every directory seen so far
        self._dirs = {}
        addpath = self.addpath
        if skip is not None and safehasattr(map, 'iteritems'):
            # dirstate-style mapping: honour the skip state character
            for f, s in map.iteritems():
                if s[0] != skip:
                    addpath(f)
        else:
            # plain iterable of file names (e.g. a manifest)
            for f in map:
                addpath(f)

    def addpath(self, path):
        counts = self._dirs
        for ancestor in finddirs(path):
            if ancestor in counts:
                # every shallower ancestor is already accounted for
                counts[ancestor] += 1
                return
            counts[ancestor] = 1

    def delpath(self, path):
        counts = self._dirs
        for ancestor in finddirs(path):
            if counts[ancestor] > 1:
                counts[ancestor] -= 1
                return
            del counts[ancestor]

    def __iter__(self):
        return self._dirs.iterkeys()

    def __contains__(self, d):
        return d in self._dirs
2265 2250
# prefer the C implementation of dirs when the parsers extension
# module provides one; it shadows the Python class defined above
if safehasattr(parsers, 'dirs'):
    dirs = parsers.dirs
2268 2253
def finddirs(path):
    '''Yield each ancestor directory of path, deepest first.

    Uses '/' as the separator; a path with no '/' yields nothing.
    '''
    idx = len(path)
    while True:
        idx = path.rfind('/', 0, idx)
        if idx == -1:
            return
        yield path[:idx]
2274 2259
# convenient short alias for debugstacktrace, handy in throwaway
# debugging sessions
dst = debugstacktrace
General Comments 0
You need to be logged in to leave comments. Login now