util: add normcase spec and fallback...
Siddharth Agarwal
r24605:98744856 default
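The changeset re-exports two new platform attributes, normcasespec and normcasefallback, next to the existing normcase (the added lines 51-52 in the hunk below). A minimal sketch of how a caller might combine them, assuming encoding.normcasespecs provides lower/upper/other constants (an assumption; that module is not shown in this diff):

    import encoding
    import util

    def foldname(path):
        # Fast path when the platform folds case by plain lower/upper-casing;
        # anything else defers to the platform-specific fallback routine.
        spec = util.normcasespec
        if spec == encoding.normcasespecs.lower:    # assumed constant
            return path.lower()
        if spec == encoding.normcasespecs.upper:    # assumed constant
            return path.upper()
        return util.normcasefallback(path)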
@@ -1,2242 +1,2244 b''
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 import i18n
17 17 _ = i18n._
18 18 import error, osutil, encoding
19 19 import errno, shutil, sys, tempfile, traceback
20 20 import re as remod
21 21 import os, time, datetime, calendar, textwrap, signal, collections
22 22 import imp, socket, urllib, struct
23 23 import gc
24 24
25 25 if os.name == 'nt':
26 26 import windows as platform
27 27 else:
28 28 import posix as platform
29 29
30 30 cachestat = platform.cachestat
31 31 checkexec = platform.checkexec
32 32 checklink = platform.checklink
33 33 copymode = platform.copymode
34 34 executablepath = platform.executablepath
35 35 expandglobs = platform.expandglobs
36 36 explainexit = platform.explainexit
37 37 findexe = platform.findexe
38 38 gethgcmd = platform.gethgcmd
39 39 getuser = platform.getuser
40 40 groupmembers = platform.groupmembers
41 41 groupname = platform.groupname
42 42 hidewindow = platform.hidewindow
43 43 isexec = platform.isexec
44 44 isowner = platform.isowner
45 45 localpath = platform.localpath
46 46 lookupreg = platform.lookupreg
47 47 makedir = platform.makedir
48 48 nlinks = platform.nlinks
49 49 normpath = platform.normpath
50 50 normcase = platform.normcase
51 normcasespec = platform.normcasespec
52 normcasefallback = platform.normcasefallback
51 53 openhardlinks = platform.openhardlinks
52 54 oslink = platform.oslink
53 55 parsepatchoutput = platform.parsepatchoutput
54 56 pconvert = platform.pconvert
55 57 popen = platform.popen
56 58 posixfile = platform.posixfile
57 59 quotecommand = platform.quotecommand
58 60 readpipe = platform.readpipe
59 61 rename = platform.rename
60 62 samedevice = platform.samedevice
61 63 samefile = platform.samefile
62 64 samestat = platform.samestat
63 65 setbinary = platform.setbinary
64 66 setflags = platform.setflags
65 67 setsignalhandler = platform.setsignalhandler
66 68 shellquote = platform.shellquote
67 69 spawndetached = platform.spawndetached
68 70 split = platform.split
69 71 sshargs = platform.sshargs
70 72 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
71 73 statisexec = platform.statisexec
72 74 statislink = platform.statislink
73 75 termwidth = platform.termwidth
74 76 testpid = platform.testpid
75 77 umask = platform.umask
76 78 unlink = platform.unlink
77 79 unlinkpath = platform.unlinkpath
78 80 username = platform.username
79 81
80 82 # Python compatibility
81 83
82 84 _notset = object()
83 85
84 86 def safehasattr(thing, attr):
85 87 return getattr(thing, attr, _notset) is not _notset
86 88
87 89 def sha1(s=''):
88 90 '''
89 91 Low-overhead wrapper around Python's SHA support
90 92
91 93 >>> f = _fastsha1
92 94 >>> a = sha1()
93 95 >>> a = f()
94 96 >>> a.hexdigest()
95 97 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
96 98 '''
97 99
98 100 return _fastsha1(s)
99 101
100 102 def _fastsha1(s=''):
101 103 # This function will import sha1 from hashlib or sha (whichever is
102 104 # available) and overwrite itself with it on the first call.
103 105 # Subsequent calls will go directly to the imported function.
104 106 if sys.version_info >= (2, 5):
105 107 from hashlib import sha1 as _sha1
106 108 else:
107 109 from sha import sha as _sha1
108 110 global _fastsha1, sha1
109 111 _fastsha1 = sha1 = _sha1
110 112 return _sha1(s)
111 113
112 114 def md5(s=''):
113 115 try:
114 116 from hashlib import md5 as _md5
115 117 except ImportError:
116 118 from md5 import md5 as _md5
117 119 global md5
118 120 md5 = _md5
119 121 return _md5(s)
120 122
121 123 DIGESTS = {
122 124 'md5': md5,
123 125 'sha1': sha1,
124 126 }
125 127 # List of digest types from strongest to weakest
126 128 DIGESTS_BY_STRENGTH = ['sha1', 'md5']
127 129
128 130 try:
129 131 import hashlib
130 132 DIGESTS.update({
131 133 'sha512': hashlib.sha512,
132 134 })
133 135 DIGESTS_BY_STRENGTH.insert(0, 'sha512')
134 136 except ImportError:
135 137 pass
136 138
137 139 for k in DIGESTS_BY_STRENGTH:
138 140 assert k in DIGESTS
139 141
140 142 class digester(object):
141 143 """helper to compute digests.
142 144
143 145 This helper can be used to compute one or more digests given their name.
144 146
145 147 >>> d = digester(['md5', 'sha1'])
146 148 >>> d.update('foo')
147 149 >>> [k for k in sorted(d)]
148 150 ['md5', 'sha1']
149 151 >>> d['md5']
150 152 'acbd18db4cc2f85cedef654fccc4a4d8'
151 153 >>> d['sha1']
152 154 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
153 155 >>> digester.preferred(['md5', 'sha1'])
154 156 'sha1'
155 157 """
156 158
157 159 def __init__(self, digests, s=''):
158 160 self._hashes = {}
159 161 for k in digests:
160 162 if k not in DIGESTS:
161 163 raise Abort(_('unknown digest type: %s') % k)
162 164 self._hashes[k] = DIGESTS[k]()
163 165 if s:
164 166 self.update(s)
165 167
166 168 def update(self, data):
167 169 for h in self._hashes.values():
168 170 h.update(data)
169 171
170 172 def __getitem__(self, key):
171 173 if key not in DIGESTS:
172 174 raise Abort(_('unknown digest type: %s') % key)
173 175 return self._hashes[key].hexdigest()
174 176
175 177 def __iter__(self):
176 178 return iter(self._hashes)
177 179
178 180 @staticmethod
179 181 def preferred(supported):
180 182 """returns the strongest digest type in both supported and DIGESTS."""
181 183
182 184 for k in DIGESTS_BY_STRENGTH:
183 185 if k in supported:
184 186 return k
185 187 return None
186 188
187 189 class digestchecker(object):
188 190 """file handle wrapper that additionally checks content against a given
189 191 size and digests.
190 192
191 193 d = digestchecker(fh, size, {'md5': '...'})
192 194
193 195 When multiple digests are given, all of them are validated.
194 196 """
195 197
196 198 def __init__(self, fh, size, digests):
197 199 self._fh = fh
198 200 self._size = size
199 201 self._got = 0
200 202 self._digests = dict(digests)
201 203 self._digester = digester(self._digests.keys())
202 204
203 205 def read(self, length=-1):
204 206 content = self._fh.read(length)
205 207 self._digester.update(content)
206 208 self._got += len(content)
207 209 return content
208 210
209 211 def validate(self):
210 212 if self._size != self._got:
211 213 raise Abort(_('size mismatch: expected %d, got %d') %
212 214 (self._size, self._got))
213 215 for k, v in self._digests.items():
214 216 if v != self._digester[k]:
215 217 # i18n: first parameter is a digest name
216 218 raise Abort(_('%s mismatch: expected %s, got %s') %
217 219 (k, v, self._digester[k]))
218 220
219 221 try:
220 222 buffer = buffer
221 223 except NameError:
222 224 if sys.version_info[0] < 3:
223 225 def buffer(sliceable, offset=0):
224 226 return sliceable[offset:]
225 227 else:
226 228 def buffer(sliceable, offset=0):
227 229 return memoryview(sliceable)[offset:]
228 230
229 231 import subprocess
230 232 closefds = os.name == 'posix'
231 233
232 234 def unpacker(fmt):
233 235 """create a struct unpacker for the specified format"""
234 236 try:
235 237 # 2.5+
236 238 return struct.Struct(fmt).unpack
237 239 except AttributeError:
238 240 # 2.4
239 241 return lambda buf: struct.unpack(fmt, buf)
240 242
241 243 def popen2(cmd, env=None, newlines=False):
242 244 # Setting bufsize to -1 lets the system decide the buffer size.
243 245 # The default for bufsize is 0, meaning unbuffered. This leads to
244 246 # poor performance on Mac OS X: http://bugs.python.org/issue4194
245 247 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
246 248 close_fds=closefds,
247 249 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
248 250 universal_newlines=newlines,
249 251 env=env)
250 252 return p.stdin, p.stdout
251 253
252 254 def popen3(cmd, env=None, newlines=False):
253 255 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
254 256 return stdin, stdout, stderr
255 257
256 258 def popen4(cmd, env=None, newlines=False):
257 259 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
258 260 close_fds=closefds,
259 261 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
260 262 stderr=subprocess.PIPE,
261 263 universal_newlines=newlines,
262 264 env=env)
263 265 return p.stdin, p.stdout, p.stderr, p
264 266
265 267 def version():
266 268 """Return version information if available."""
267 269 try:
268 270 import __version__
269 271 return __version__.version
270 272 except ImportError:
271 273 return 'unknown'
272 274
273 275 # used by parsedate
274 276 defaultdateformats = (
275 277 '%Y-%m-%d %H:%M:%S',
276 278 '%Y-%m-%d %I:%M:%S%p',
277 279 '%Y-%m-%d %H:%M',
278 280 '%Y-%m-%d %I:%M%p',
279 281 '%Y-%m-%d',
280 282 '%m-%d',
281 283 '%m/%d',
282 284 '%m/%d/%y',
283 285 '%m/%d/%Y',
284 286 '%a %b %d %H:%M:%S %Y',
285 287 '%a %b %d %I:%M:%S%p %Y',
286 288 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
287 289 '%b %d %H:%M:%S %Y',
288 290 '%b %d %I:%M:%S%p %Y',
289 291 '%b %d %H:%M:%S',
290 292 '%b %d %I:%M:%S%p',
291 293 '%b %d %H:%M',
292 294 '%b %d %I:%M%p',
293 295 '%b %d %Y',
294 296 '%b %d',
295 297 '%H:%M:%S',
296 298 '%I:%M:%S%p',
297 299 '%H:%M',
298 300 '%I:%M%p',
299 301 )
300 302
301 303 extendeddateformats = defaultdateformats + (
302 304 "%Y",
303 305 "%Y-%m",
304 306 "%b",
305 307 "%b %Y",
306 308 )
307 309
308 310 def cachefunc(func):
309 311 '''cache the result of function calls'''
310 312 # XXX doesn't handle keywords args
311 313 if func.func_code.co_argcount == 0:
312 314 cache = []
313 315 def f():
314 316 if len(cache) == 0:
315 317 cache.append(func())
316 318 return cache[0]
317 319 return f
318 320 cache = {}
319 321 if func.func_code.co_argcount == 1:
320 322 # we gain a small amount of time because
321 323 # we don't need to pack/unpack the list
322 324 def f(arg):
323 325 if arg not in cache:
324 326 cache[arg] = func(arg)
325 327 return cache[arg]
326 328 else:
327 329 def f(*args):
328 330 if args not in cache:
329 331 cache[args] = func(*args)
330 332 return cache[args]
331 333
332 334 return f
333 335
334 336 try:
335 337 collections.deque.remove
336 338 deque = collections.deque
337 339 except AttributeError:
338 340 # python 2.4 lacks deque.remove
339 341 class deque(collections.deque):
340 342 def remove(self, val):
341 343 for i, v in enumerate(self):
342 344 if v == val:
343 345 del self[i]
344 346 break
345 347
346 348 class sortdict(dict):
347 349 '''a simple sorted dictionary'''
348 350 def __init__(self, data=None):
349 351 self._list = []
350 352 if data:
351 353 self.update(data)
352 354 def copy(self):
353 355 return sortdict(self)
354 356 def __setitem__(self, key, val):
355 357 if key in self:
356 358 self._list.remove(key)
357 359 self._list.append(key)
358 360 dict.__setitem__(self, key, val)
359 361 def __iter__(self):
360 362 return self._list.__iter__()
361 363 def update(self, src):
362 364 if isinstance(src, dict):
363 365 src = src.iteritems()
364 366 for k, v in src:
365 367 self[k] = v
366 368 def clear(self):
367 369 dict.clear(self)
368 370 self._list = []
369 371 def items(self):
370 372 return [(k, self[k]) for k in self._list]
371 373 def __delitem__(self, key):
372 374 dict.__delitem__(self, key)
373 375 self._list.remove(key)
374 376 def pop(self, key, *args, **kwargs):
375 377 dict.pop(self, key, *args, **kwargs)
376 378 try:
377 379 self._list.remove(key)
378 380 except ValueError:
379 381 pass
380 382 def keys(self):
381 383 return self._list
382 384 def iterkeys(self):
383 385 return self._list.__iter__()
384 386 def iteritems(self):
385 387 for k in self._list:
386 388 yield k, self[k]
387 389 def insert(self, index, key, val):
388 390 self._list.insert(index, key)
389 391 dict.__setitem__(self, key, val)
390 392
391 393 class lrucachedict(object):
392 394 '''cache most recent gets from or sets to this dictionary'''
393 395 def __init__(self, maxsize):
394 396 self._cache = {}
395 397 self._maxsize = maxsize
396 398 self._order = deque()
397 399
398 400 def __getitem__(self, key):
399 401 value = self._cache[key]
400 402 self._order.remove(key)
401 403 self._order.append(key)
402 404 return value
403 405
404 406 def __setitem__(self, key, value):
405 407 if key not in self._cache:
406 408 if len(self._cache) >= self._maxsize:
407 409 del self._cache[self._order.popleft()]
408 410 else:
409 411 self._order.remove(key)
410 412 self._cache[key] = value
411 413 self._order.append(key)
412 414
413 415 def __contains__(self, key):
414 416 return key in self._cache
415 417
416 418 def clear(self):
417 419 self._cache.clear()
418 420 self._order = deque()
419 421
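# A brief usage sketch for lrucachedict (illustrative, not part of the diff):
# the least recently used key is evicted once maxsize is exceeded.
#
#   d = lrucachedict(2)
#   d['a'] = 1
#   d['b'] = 2
#   d['a']          # touching 'a' makes 'b' the oldest entry
#   d['c'] = 3      # evicts 'b'
#   'b' in d        # -> False
#   'a' in d        # -> True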
420 422 def lrucachefunc(func):
421 423 '''cache most recent results of function calls'''
422 424 cache = {}
423 425 order = deque()
424 426 if func.func_code.co_argcount == 1:
425 427 def f(arg):
426 428 if arg not in cache:
427 429 if len(cache) > 20:
428 430 del cache[order.popleft()]
429 431 cache[arg] = func(arg)
430 432 else:
431 433 order.remove(arg)
432 434 order.append(arg)
433 435 return cache[arg]
434 436 else:
435 437 def f(*args):
436 438 if args not in cache:
437 439 if len(cache) > 20:
438 440 del cache[order.popleft()]
439 441 cache[args] = func(*args)
440 442 else:
441 443 order.remove(args)
442 444 order.append(args)
443 445 return cache[args]
444 446
445 447 return f
446 448
447 449 class propertycache(object):
448 450 def __init__(self, func):
449 451 self.func = func
450 452 self.name = func.__name__
451 453 def __get__(self, obj, type=None):
452 454 result = self.func(obj)
453 455 self.cachevalue(obj, result)
454 456 return result
455 457
456 458 def cachevalue(self, obj, value):
457 459 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
458 460 obj.__dict__[self.name] = value
459 461
460 462 def pipefilter(s, cmd):
461 463 '''filter string S through command CMD, returning its output'''
462 464 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
463 465 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
464 466 pout, perr = p.communicate(s)
465 467 return pout
466 468
467 469 def tempfilter(s, cmd):
468 470 '''filter string S through a pair of temporary files with CMD.
469 471 CMD is used as a template to create the real command to be run,
470 472 with the strings INFILE and OUTFILE replaced by the real names of
471 473 the temporary files generated.'''
472 474 inname, outname = None, None
473 475 try:
474 476 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
475 477 fp = os.fdopen(infd, 'wb')
476 478 fp.write(s)
477 479 fp.close()
478 480 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
479 481 os.close(outfd)
480 482 cmd = cmd.replace('INFILE', inname)
481 483 cmd = cmd.replace('OUTFILE', outname)
482 484 code = os.system(cmd)
483 485 if sys.platform == 'OpenVMS' and code & 1:
484 486 code = 0
485 487 if code:
486 488 raise Abort(_("command '%s' failed: %s") %
487 489 (cmd, explainexit(code)))
488 490 fp = open(outname, 'rb')
489 491 r = fp.read()
490 492 fp.close()
491 493 return r
492 494 finally:
493 495 try:
494 496 if inname:
495 497 os.unlink(inname)
496 498 except OSError:
497 499 pass
498 500 try:
499 501 if outname:
500 502 os.unlink(outname)
501 503 except OSError:
502 504 pass
503 505
504 506 filtertable = {
505 507 'tempfile:': tempfilter,
506 508 'pipe:': pipefilter,
507 509 }
508 510
509 511 def filter(s, cmd):
510 512 "filter a string through a command that transforms its input to its output"
511 513 for name, fn in filtertable.iteritems():
512 514 if cmd.startswith(name):
513 515 return fn(s, cmd[len(name):].lstrip())
514 516 return pipefilter(s, cmd)
515 517
516 518 def binary(s):
517 519 """return true if a string is binary data"""
518 520 return bool(s and '\0' in s)
519 521
520 522 def increasingchunks(source, min=1024, max=65536):
521 523 '''return no less than min bytes per chunk while data remains,
522 524 doubling min after each chunk until it reaches max'''
523 525 def log2(x):
524 526 if not x:
525 527 return 0
526 528 i = 0
527 529 while x:
528 530 x >>= 1
529 531 i += 1
530 532 return i - 1
531 533
532 534 buf = []
533 535 blen = 0
534 536 for chunk in source:
535 537 buf.append(chunk)
536 538 blen += len(chunk)
537 539 if blen >= min:
538 540 if min < max:
539 541 min = min << 1
540 542 nmin = 1 << log2(blen)
541 543 if nmin > min:
542 544 min = nmin
543 545 if min > max:
544 546 min = max
545 547 yield ''.join(buf)
546 548 blen = 0
547 549 buf = []
548 550 if buf:
549 551 yield ''.join(buf)
550 552
551 553 Abort = error.Abort
552 554
553 555 def always(fn):
554 556 return True
555 557
556 558 def never(fn):
557 559 return False
558 560
559 561 def nogc(func):
560 562 """disable garbage collector
561 563
562 564 Python's garbage collector triggers a GC each time a certain number of
563 565 container objects (the number being defined by gc.get_threshold()) are
564 566 allocated even when marked not to be tracked by the collector. Tracking has
565 567 no effect on when GCs are triggered, only on what objects the GC looks
566 568 into. As a workaround, disable GC while building complex (huge)
567 569 containers.
568 570
569 571 This garbage collector issue has been fixed in 2.7.
570 572 """
571 573 def wrapper(*args, **kwargs):
572 574 gcenabled = gc.isenabled()
573 575 gc.disable()
574 576 try:
575 577 return func(*args, **kwargs)
576 578 finally:
577 579 if gcenabled:
578 580 gc.enable()
579 581 return wrapper
580 582
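# A minimal sketch of how nogc is meant to be applied (illustrative): decorate
# a function that builds a large container so the collector stays disabled for
# the duration of the call.
#
#   @nogc
#   def buildmap(items):
#       return dict((k, k) for k in items)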
581 583 def pathto(root, n1, n2):
582 584 '''return the relative path from one place to another.
583 585 root should use os.sep to separate directories
584 586 n1 should use os.sep to separate directories
585 587 n2 should use "/" to separate directories
586 588 returns an os.sep-separated path.
587 589
588 590 If n1 is a relative path, it's assumed it's
589 591 relative to root.
590 592 n2 should always be relative to root.
591 593 '''
592 594 if not n1:
593 595 return localpath(n2)
594 596 if os.path.isabs(n1):
595 597 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
596 598 return os.path.join(root, localpath(n2))
597 599 n2 = '/'.join((pconvert(root), n2))
598 600 a, b = splitpath(n1), n2.split('/')
599 601 a.reverse()
600 602 b.reverse()
601 603 while a and b and a[-1] == b[-1]:
602 604 a.pop()
603 605 b.pop()
604 606 b.reverse()
605 607 return os.sep.join((['..'] * len(a)) + b) or '.'
606 608
607 609 def mainfrozen():
608 610 """return True if we are a frozen executable.
609 611
610 612 The code supports py2exe (most common, Windows only) and tools/freeze
611 613 (portable, not much used).
612 614 """
613 615 return (safehasattr(sys, "frozen") or # new py2exe
614 616 safehasattr(sys, "importers") or # old py2exe
615 617 imp.is_frozen("__main__")) # tools/freeze
616 618
617 619 # the location of data files matching the source code
618 620 if mainfrozen():
619 621 # executable version (py2exe) doesn't support __file__
620 622 datapath = os.path.dirname(sys.executable)
621 623 else:
622 624 datapath = os.path.dirname(__file__)
623 625
624 626 i18n.setdatapath(datapath)
625 627
626 628 _hgexecutable = None
627 629
628 630 def hgexecutable():
629 631 """return location of the 'hg' executable.
630 632
631 633 Defaults to $HG or 'hg' in the search path.
632 634 """
633 635 if _hgexecutable is None:
634 636 hg = os.environ.get('HG')
635 637 mainmod = sys.modules['__main__']
636 638 if hg:
637 639 _sethgexecutable(hg)
638 640 elif mainfrozen():
639 641 _sethgexecutable(sys.executable)
640 642 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
641 643 _sethgexecutable(mainmod.__file__)
642 644 else:
643 645 exe = findexe('hg') or os.path.basename(sys.argv[0])
644 646 _sethgexecutable(exe)
645 647 return _hgexecutable
646 648
647 649 def _sethgexecutable(path):
648 650 """set location of the 'hg' executable"""
649 651 global _hgexecutable
650 652 _hgexecutable = path
651 653
652 654 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
653 655 '''enhanced shell command execution.
654 656 run with environment maybe modified, maybe in different dir.
655 657
656 658 if command fails and onerr is None, return status, else raise onerr
657 659 object as exception.
658 660
659 661 if out is specified, it is assumed to be a file-like object that has a
660 662 write() method. stdout and stderr will be redirected to out.'''
661 663 try:
662 664 sys.stdout.flush()
663 665 except Exception:
664 666 pass
665 667 def py2shell(val):
666 668 'convert python object into string that is useful to shell'
667 669 if val is None or val is False:
668 670 return '0'
669 671 if val is True:
670 672 return '1'
671 673 return str(val)
672 674 origcmd = cmd
673 675 cmd = quotecommand(cmd)
674 676 if sys.platform == 'plan9' and (sys.version_info[0] == 2
675 677 and sys.version_info[1] < 7):
676 678 # subprocess kludge to work around issues in half-baked Python
677 679 # ports, notably bichued/python:
678 680 if not cwd is None:
679 681 os.chdir(cwd)
680 682 rc = os.system(cmd)
681 683 else:
682 684 env = dict(os.environ)
683 685 env.update((k, py2shell(v)) for k, v in environ.iteritems())
684 686 env['HG'] = hgexecutable()
685 687 if out is None or out == sys.__stdout__:
686 688 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
687 689 env=env, cwd=cwd)
688 690 else:
689 691 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
690 692 env=env, cwd=cwd, stdout=subprocess.PIPE,
691 693 stderr=subprocess.STDOUT)
692 694 while True:
693 695 line = proc.stdout.readline()
694 696 if not line:
695 697 break
696 698 out.write(line)
697 699 proc.wait()
698 700 rc = proc.returncode
699 701 if sys.platform == 'OpenVMS' and rc & 1:
700 702 rc = 0
701 703 if rc and onerr:
702 704 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
703 705 explainexit(rc)[0])
704 706 if errprefix:
705 707 errmsg = '%s: %s' % (errprefix, errmsg)
706 708 raise onerr(errmsg)
707 709 return rc
708 710
709 711 def checksignature(func):
710 712 '''wrap a function with code to check for calling errors'''
711 713 def check(*args, **kwargs):
712 714 try:
713 715 return func(*args, **kwargs)
714 716 except TypeError:
715 717 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
716 718 raise error.SignatureError
717 719 raise
718 720
719 721 return check
720 722
721 723 def copyfile(src, dest, hardlink=False):
722 724 "copy a file, preserving mode and atime/mtime"
723 725 if os.path.lexists(dest):
724 726 unlink(dest)
725 727 # hardlinks are problematic on CIFS, quietly ignore this flag
726 728 # until we find a way to work around it cleanly (issue4546)
727 729 if False and hardlink:
728 730 try:
729 731 oslink(src, dest)
730 732 return
731 733 except (IOError, OSError):
732 734 pass # fall back to normal copy
733 735 if os.path.islink(src):
734 736 os.symlink(os.readlink(src), dest)
735 737 else:
736 738 try:
737 739 shutil.copyfile(src, dest)
738 740 shutil.copymode(src, dest)
739 741 except shutil.Error, inst:
740 742 raise Abort(str(inst))
741 743
742 744 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
743 745 """Copy a directory tree using hardlinks if possible."""
744 746 num = 0
745 747
746 748 if hardlink is None:
747 749 hardlink = (os.stat(src).st_dev ==
748 750 os.stat(os.path.dirname(dst)).st_dev)
749 751 if hardlink:
750 752 topic = _('linking')
751 753 else:
752 754 topic = _('copying')
753 755
754 756 if os.path.isdir(src):
755 757 os.mkdir(dst)
756 758 for name, kind in osutil.listdir(src):
757 759 srcname = os.path.join(src, name)
758 760 dstname = os.path.join(dst, name)
759 761 def nprog(t, pos):
760 762 if pos is not None:
761 763 return progress(t, pos + num)
762 764 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
763 765 num += n
764 766 else:
765 767 if hardlink:
766 768 try:
767 769 oslink(src, dst)
768 770 except (IOError, OSError):
769 771 hardlink = False
770 772 shutil.copy(src, dst)
771 773 else:
772 774 shutil.copy(src, dst)
773 775 num += 1
774 776 progress(topic, num)
775 777 progress(topic, None)
776 778
777 779 return hardlink, num
778 780
779 781 _winreservednames = '''con prn aux nul
780 782 com1 com2 com3 com4 com5 com6 com7 com8 com9
781 783 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
782 784 _winreservedchars = ':*?"<>|'
783 785 def checkwinfilename(path):
784 786 r'''Check that the base-relative path is a valid filename on Windows.
785 787 Returns None if the path is ok, or a UI string describing the problem.
786 788
787 789 >>> checkwinfilename("just/a/normal/path")
788 790 >>> checkwinfilename("foo/bar/con.xml")
789 791 "filename contains 'con', which is reserved on Windows"
790 792 >>> checkwinfilename("foo/con.xml/bar")
791 793 "filename contains 'con', which is reserved on Windows"
792 794 >>> checkwinfilename("foo/bar/xml.con")
793 795 >>> checkwinfilename("foo/bar/AUX/bla.txt")
794 796 "filename contains 'AUX', which is reserved on Windows"
795 797 >>> checkwinfilename("foo/bar/bla:.txt")
796 798 "filename contains ':', which is reserved on Windows"
797 799 >>> checkwinfilename("foo/bar/b\07la.txt")
798 800 "filename contains '\\x07', which is invalid on Windows"
799 801 >>> checkwinfilename("foo/bar/bla ")
800 802 "filename ends with ' ', which is not allowed on Windows"
801 803 >>> checkwinfilename("../bar")
802 804 >>> checkwinfilename("foo\\")
803 805 "filename ends with '\\', which is invalid on Windows"
804 806 >>> checkwinfilename("foo\\/bar")
805 807 "directory name ends with '\\', which is invalid on Windows"
806 808 '''
807 809 if path.endswith('\\'):
808 810 return _("filename ends with '\\', which is invalid on Windows")
809 811 if '\\/' in path:
810 812 return _("directory name ends with '\\', which is invalid on Windows")
811 813 for n in path.replace('\\', '/').split('/'):
812 814 if not n:
813 815 continue
814 816 for c in n:
815 817 if c in _winreservedchars:
816 818 return _("filename contains '%s', which is reserved "
817 819 "on Windows") % c
818 820 if ord(c) <= 31:
819 821 return _("filename contains %r, which is invalid "
820 822 "on Windows") % c
821 823 base = n.split('.')[0]
822 824 if base and base.lower() in _winreservednames:
823 825 return _("filename contains '%s', which is reserved "
824 826 "on Windows") % base
825 827 t = n[-1]
826 828 if t in '. ' and n not in '..':
827 829 return _("filename ends with '%s', which is not allowed "
828 830 "on Windows") % t
829 831
830 832 if os.name == 'nt':
831 833 checkosfilename = checkwinfilename
832 834 else:
833 835 checkosfilename = platform.checkosfilename
834 836
835 837 def makelock(info, pathname):
836 838 try:
837 839 return os.symlink(info, pathname)
838 840 except OSError, why:
839 841 if why.errno == errno.EEXIST:
840 842 raise
841 843 except AttributeError: # no symlink in os
842 844 pass
843 845
844 846 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
845 847 os.write(ld, info)
846 848 os.close(ld)
847 849
848 850 def readlock(pathname):
849 851 try:
850 852 return os.readlink(pathname)
851 853 except OSError, why:
852 854 if why.errno not in (errno.EINVAL, errno.ENOSYS):
853 855 raise
854 856 except AttributeError: # no symlink in os
855 857 pass
856 858 fp = posixfile(pathname)
857 859 r = fp.read()
858 860 fp.close()
859 861 return r
860 862
861 863 def fstat(fp):
862 864 '''stat file object that may not have fileno method.'''
863 865 try:
864 866 return os.fstat(fp.fileno())
865 867 except AttributeError:
866 868 return os.stat(fp.name)
867 869
868 870 # File system features
869 871
870 872 def checkcase(path):
871 873 """
872 874 Return true if the given path is on a case-sensitive filesystem
873 875
874 876 Requires a path (like /foo/.hg) ending with a foldable final
875 877 directory component.
876 878 """
877 879 s1 = os.stat(path)
878 880 d, b = os.path.split(path)
879 881 b2 = b.upper()
880 882 if b == b2:
881 883 b2 = b.lower()
882 884 if b == b2:
883 885 return True # no evidence against case sensitivity
884 886 p2 = os.path.join(d, b2)
885 887 try:
886 888 s2 = os.stat(p2)
887 889 if s2 == s1:
888 890 return False
889 891 return True
890 892 except OSError:
891 893 return True
892 894
893 895 try:
894 896 import re2
895 897 _re2 = None
896 898 except ImportError:
897 899 _re2 = False
898 900
899 901 class _re(object):
900 902 def _checkre2(self):
901 903 global _re2
902 904 try:
903 905 # check if match works, see issue3964
904 906 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
905 907 except ImportError:
906 908 _re2 = False
907 909
908 910 def compile(self, pat, flags=0):
909 911 '''Compile a regular expression, using re2 if possible
910 912
911 913 For best performance, use only re2-compatible regexp features. The
912 914 only flags from the re module that are re2-compatible are
913 915 IGNORECASE and MULTILINE.'''
914 916 if _re2 is None:
915 917 self._checkre2()
916 918 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
917 919 if flags & remod.IGNORECASE:
918 920 pat = '(?i)' + pat
919 921 if flags & remod.MULTILINE:
920 922 pat = '(?m)' + pat
921 923 try:
922 924 return re2.compile(pat)
923 925 except re2.error:
924 926 pass
925 927 return remod.compile(pat, flags)
926 928
927 929 @propertycache
928 930 def escape(self):
929 931 '''Return the version of escape corresponding to self.compile.
930 932
931 933 This is imperfect because whether re2 or re is used for a particular
932 934 function depends on the flags, etc, but it's the best we can do.
933 935 '''
934 936 global _re2
935 937 if _re2 is None:
936 938 self._checkre2()
937 939 if _re2:
938 940 return re2.escape
939 941 else:
940 942 return remod.escape
941 943
942 944 re = _re()
943 945
944 946 _fspathcache = {}
945 947 def fspath(name, root):
946 948 '''Get name in the case stored in the filesystem
947 949
948 950 The name should be relative to root, and be normcase-ed for efficiency.
949 951
950 952 Note that this function is unnecessary, and should not be
951 953 called, for case-sensitive filesystems (simply because it's expensive).
952 954
953 955 The root should be normcase-ed, too.
954 956 '''
955 957 def _makefspathcacheentry(dir):
956 958 return dict((normcase(n), n) for n in os.listdir(dir))
957 959
958 960 seps = os.sep
959 961 if os.altsep:
960 962 seps = seps + os.altsep
961 963 # Protect backslashes. This gets silly very quickly.
962 964 seps.replace('\\','\\\\')
963 965 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
964 966 dir = os.path.normpath(root)
965 967 result = []
966 968 for part, sep in pattern.findall(name):
967 969 if sep:
968 970 result.append(sep)
969 971 continue
970 972
971 973 if dir not in _fspathcache:
972 974 _fspathcache[dir] = _makefspathcacheentry(dir)
973 975 contents = _fspathcache[dir]
974 976
975 977 found = contents.get(part)
976 978 if not found:
977 979 # retry "once per directory" per "dirstate.walk" which
978 980 # may take place for each patch of "hg qpush", for example
979 981 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
980 982 found = contents.get(part)
981 983
982 984 result.append(found or part)
983 985 dir = os.path.join(dir, part)
984 986
985 987 return ''.join(result)
986 988
987 989 def checknlink(testfile):
988 990 '''check whether hardlink count reporting works properly'''
989 991
990 992 # testfile may be open, so we need a separate file for checking to
991 993 # work around issue2543 (or testfile may get lost on Samba shares)
992 994 f1 = testfile + ".hgtmp1"
993 995 if os.path.lexists(f1):
994 996 return False
995 997 try:
996 998 posixfile(f1, 'w').close()
997 999 except IOError:
998 1000 return False
999 1001
1000 1002 f2 = testfile + ".hgtmp2"
1001 1003 fd = None
1002 1004 try:
1003 1005 try:
1004 1006 oslink(f1, f2)
1005 1007 except OSError:
1006 1008 return False
1007 1009
1008 1010 # nlinks() may behave differently for files on Windows shares if
1009 1011 # the file is open.
1010 1012 fd = posixfile(f2)
1011 1013 return nlinks(f2) > 1
1012 1014 finally:
1013 1015 if fd is not None:
1014 1016 fd.close()
1015 1017 for f in (f1, f2):
1016 1018 try:
1017 1019 os.unlink(f)
1018 1020 except OSError:
1019 1021 pass
1020 1022
1021 1023 def endswithsep(path):
1022 1024 '''Check path ends with os.sep or os.altsep.'''
1023 1025 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1024 1026
1025 1027 def splitpath(path):
1026 1028 '''Split path by os.sep.
1027 1029 Note that this function does not use os.altsep because it is
1028 1030 intended as a simple replacement for "xxx.split(os.sep)".
1029 1031 It is recommended to use os.path.normpath() before using this
1030 1032 function if needed.'''
1031 1033 return path.split(os.sep)
1032 1034
1033 1035 def gui():
1034 1036 '''Are we running in a GUI?'''
1035 1037 if sys.platform == 'darwin':
1036 1038 if 'SSH_CONNECTION' in os.environ:
1037 1039 # handle SSH access to a box where the user is logged in
1038 1040 return False
1039 1041 elif getattr(osutil, 'isgui', None):
1040 1042 # check if a CoreGraphics session is available
1041 1043 return osutil.isgui()
1042 1044 else:
1043 1045 # pure build; use a safe default
1044 1046 return True
1045 1047 else:
1046 1048 return os.name == "nt" or os.environ.get("DISPLAY")
1047 1049
1048 1050 def mktempcopy(name, emptyok=False, createmode=None):
1049 1051 """Create a temporary file with the same contents from name
1050 1052
1051 1053 The permission bits are copied from the original file.
1052 1054
1053 1055 If the temporary file is going to be truncated immediately, you
1054 1056 can use emptyok=True as an optimization.
1055 1057
1056 1058 Returns the name of the temporary file.
1057 1059 """
1058 1060 d, fn = os.path.split(name)
1059 1061 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1060 1062 os.close(fd)
1061 1063 # Temporary files are created with mode 0600, which is usually not
1062 1064 # what we want. If the original file already exists, just copy
1063 1065 # its mode. Otherwise, manually obey umask.
1064 1066 copymode(name, temp, createmode)
1065 1067 if emptyok:
1066 1068 return temp
1067 1069 try:
1068 1070 try:
1069 1071 ifp = posixfile(name, "rb")
1070 1072 except IOError, inst:
1071 1073 if inst.errno == errno.ENOENT:
1072 1074 return temp
1073 1075 if not getattr(inst, 'filename', None):
1074 1076 inst.filename = name
1075 1077 raise
1076 1078 ofp = posixfile(temp, "wb")
1077 1079 for chunk in filechunkiter(ifp):
1078 1080 ofp.write(chunk)
1079 1081 ifp.close()
1080 1082 ofp.close()
1081 1083 except: # re-raises
1082 1084 try: os.unlink(temp)
1083 1085 except OSError: pass
1084 1086 raise
1085 1087 return temp
1086 1088
1087 1089 class atomictempfile(object):
1088 1090 '''writable file object that atomically updates a file
1089 1091
1090 1092 All writes will go to a temporary copy of the original file. Call
1091 1093 close() when you are done writing, and atomictempfile will rename
1092 1094 the temporary copy to the original name, making the changes
1093 1095 visible. If the object is destroyed without being closed, all your
1094 1096 writes are discarded.
1095 1097 '''
1096 1098 def __init__(self, name, mode='w+b', createmode=None):
1097 1099 self.__name = name # permanent name
1098 1100 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1099 1101 createmode=createmode)
1100 1102 self._fp = posixfile(self._tempname, mode)
1101 1103
1102 1104 # delegated methods
1103 1105 self.write = self._fp.write
1104 1106 self.seek = self._fp.seek
1105 1107 self.tell = self._fp.tell
1106 1108 self.fileno = self._fp.fileno
1107 1109
1108 1110 def close(self):
1109 1111 if not self._fp.closed:
1110 1112 self._fp.close()
1111 1113 rename(self._tempname, localpath(self.__name))
1112 1114
1113 1115 def discard(self):
1114 1116 if not self._fp.closed:
1115 1117 try:
1116 1118 os.unlink(self._tempname)
1117 1119 except OSError:
1118 1120 pass
1119 1121 self._fp.close()
1120 1122
1121 1123 def __del__(self):
1122 1124 if safehasattr(self, '_fp'): # constructor actually did something
1123 1125 self.discard()
1124 1126
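# A short usage sketch for atomictempfile (illustrative): writes go to a
# temporary copy and only become visible on close(); discard() or garbage
# collection throws them away.
#
#   f = atomictempfile('somefile', 'wb')
#   f.write('new contents')
#   f.close()    # renames the temporary copy over 'somefile'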
1125 1127 def makedirs(name, mode=None, notindexed=False):
1126 1128 """recursive directory creation with parent mode inheritance"""
1127 1129 try:
1128 1130 makedir(name, notindexed)
1129 1131 except OSError, err:
1130 1132 if err.errno == errno.EEXIST:
1131 1133 return
1132 1134 if err.errno != errno.ENOENT or not name:
1133 1135 raise
1134 1136 parent = os.path.dirname(os.path.abspath(name))
1135 1137 if parent == name:
1136 1138 raise
1137 1139 makedirs(parent, mode, notindexed)
1138 1140 makedir(name, notindexed)
1139 1141 if mode is not None:
1140 1142 os.chmod(name, mode)
1141 1143
1142 1144 def ensuredirs(name, mode=None, notindexed=False):
1143 1145 """race-safe recursive directory creation
1144 1146
1145 1147 Newly created directories are marked as "not to be indexed by
1146 1148 the content indexing service", if ``notindexed`` is specified
1147 1149 for "write" mode access.
1148 1150 """
1149 1151 if os.path.isdir(name):
1150 1152 return
1151 1153 parent = os.path.dirname(os.path.abspath(name))
1152 1154 if parent != name:
1153 1155 ensuredirs(parent, mode, notindexed)
1154 1156 try:
1155 1157 makedir(name, notindexed)
1156 1158 except OSError, err:
1157 1159 if err.errno == errno.EEXIST and os.path.isdir(name):
1158 1160 # someone else seems to have won a directory creation race
1159 1161 return
1160 1162 raise
1161 1163 if mode is not None:
1162 1164 os.chmod(name, mode)
1163 1165
1164 1166 def readfile(path):
1165 1167 fp = open(path, 'rb')
1166 1168 try:
1167 1169 return fp.read()
1168 1170 finally:
1169 1171 fp.close()
1170 1172
1171 1173 def writefile(path, text):
1172 1174 fp = open(path, 'wb')
1173 1175 try:
1174 1176 fp.write(text)
1175 1177 finally:
1176 1178 fp.close()
1177 1179
1178 1180 def appendfile(path, text):
1179 1181 fp = open(path, 'ab')
1180 1182 try:
1181 1183 fp.write(text)
1182 1184 finally:
1183 1185 fp.close()
1184 1186
1185 1187 class chunkbuffer(object):
1186 1188 """Allow arbitrary sized chunks of data to be efficiently read from an
1187 1189 iterator over chunks of arbitrary size."""
1188 1190
1189 1191 def __init__(self, in_iter):
1190 1192 """in_iter is the iterator that's iterating over the input chunks.
1191 1193 targetsize is how big a buffer to try to maintain."""
1192 1194 def splitbig(chunks):
1193 1195 for chunk in chunks:
1194 1196 if len(chunk) > 2**20:
1195 1197 pos = 0
1196 1198 while pos < len(chunk):
1197 1199 end = pos + 2 ** 18
1198 1200 yield chunk[pos:end]
1199 1201 pos = end
1200 1202 else:
1201 1203 yield chunk
1202 1204 self.iter = splitbig(in_iter)
1203 1205 self._queue = deque()
1204 1206
1205 1207 def read(self, l=None):
1206 1208 """Read L bytes of data from the iterator of chunks of data.
1207 1209 Returns less than L bytes if the iterator runs dry.
1208 1210
1209 1211 If size parameter is omitted, read everything"""
1210 1212 left = l
1211 1213 buf = []
1212 1214 queue = self._queue
1213 1215 while left is None or left > 0:
1214 1216 # refill the queue
1215 1217 if not queue:
1216 1218 target = 2**18
1217 1219 for chunk in self.iter:
1218 1220 queue.append(chunk)
1219 1221 target -= len(chunk)
1220 1222 if target <= 0:
1221 1223 break
1222 1224 if not queue:
1223 1225 break
1224 1226
1225 1227 chunk = queue.popleft()
1226 1228 if left is not None:
1227 1229 left -= len(chunk)
1228 1230 if left is not None and left < 0:
1229 1231 queue.appendleft(chunk[left:])
1230 1232 buf.append(chunk[:left])
1231 1233 else:
1232 1234 buf.append(chunk)
1233 1235
1234 1236 return ''.join(buf)
1235 1237
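# A small usage sketch for chunkbuffer (illustrative): arbitrary-sized input
# chunks can be read back in arbitrary-sized pieces.
#
#   cb = chunkbuffer(iter(['abc', 'defg']))
#   cb.read(4)    # -> 'abcd'
#   cb.read()     # -> 'efg'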
1236 1238 def filechunkiter(f, size=65536, limit=None):
1237 1239 """Create a generator that produces the data in the file size
1238 1240 (default 65536) bytes at a time, up to optional limit (default is
1239 1241 to read all data). Chunks may be less than size bytes if the
1240 1242 chunk is the last chunk in the file, or the file is a socket or
1241 1243 some other type of file that sometimes reads less data than is
1242 1244 requested."""
1243 1245 assert size >= 0
1244 1246 assert limit is None or limit >= 0
1245 1247 while True:
1246 1248 if limit is None:
1247 1249 nbytes = size
1248 1250 else:
1249 1251 nbytes = min(limit, size)
1250 1252 s = nbytes and f.read(nbytes)
1251 1253 if not s:
1252 1254 break
1253 1255 if limit:
1254 1256 limit -= len(s)
1255 1257 yield s
1256 1258
1257 1259 def makedate(timestamp=None):
1258 1260 '''Return a unix timestamp (or the current time) as a (unixtime,
1259 1261 offset) tuple based off the local timezone.'''
1260 1262 if timestamp is None:
1261 1263 timestamp = time.time()
1262 1264 if timestamp < 0:
1263 1265 hint = _("check your clock")
1264 1266 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1265 1267 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1266 1268 datetime.datetime.fromtimestamp(timestamp))
1267 1269 tz = delta.days * 86400 + delta.seconds
1268 1270 return timestamp, tz
1269 1271
1270 1272 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1271 1273 """represent a (unixtime, offset) tuple as a localized time.
1272 1274 unixtime is seconds since the epoch, and offset is the time zone's
1273 1275 number of seconds away from UTC. if timezone is false, do not
1274 1276 append time zone to string."""
1275 1277 t, tz = date or makedate()
1276 1278 if t < 0:
1277 1279 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1278 1280 tz = 0
1279 1281 if "%1" in format or "%2" in format or "%z" in format:
1280 1282 sign = (tz > 0) and "-" or "+"
1281 1283 minutes = abs(tz) // 60
1282 1284 format = format.replace("%z", "%1%2")
1283 1285 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1284 1286 format = format.replace("%2", "%02d" % (minutes % 60))
1285 1287 try:
1286 1288 t = time.gmtime(float(t) - tz)
1287 1289 except ValueError:
1288 1290 # time was out of range
1289 1291 t = time.gmtime(sys.maxint)
1290 1292 s = time.strftime(format, t)
1291 1293 return s
1292 1294
1293 1295 def shortdate(date=None):
1294 1296 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1295 1297 return datestr(date, format='%Y-%m-%d')
1296 1298
1297 1299 def strdate(string, format, defaults=[]):
1298 1300 """parse a localized time string and return a (unixtime, offset) tuple.
1299 1301 if the string cannot be parsed, ValueError is raised."""
1300 1302 def timezone(string):
1301 1303 tz = string.split()[-1]
1302 1304 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1303 1305 sign = (tz[0] == "+") and 1 or -1
1304 1306 hours = int(tz[1:3])
1305 1307 minutes = int(tz[3:5])
1306 1308 return -sign * (hours * 60 + minutes) * 60
1307 1309 if tz == "GMT" or tz == "UTC":
1308 1310 return 0
1309 1311 return None
1310 1312
1311 1313 # NOTE: unixtime = localunixtime + offset
1312 1314 offset, date = timezone(string), string
1313 1315 if offset is not None:
1314 1316 date = " ".join(string.split()[:-1])
1315 1317
1316 1318 # add missing elements from defaults
1317 1319 usenow = False # default to using biased defaults
1318 1320 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1319 1321 found = [True for p in part if ("%"+p) in format]
1320 1322 if not found:
1321 1323 date += "@" + defaults[part][usenow]
1322 1324 format += "@%" + part[0]
1323 1325 else:
1324 1326 # We've found a specific time element, less specific time
1325 1327 # elements are relative to today
1326 1328 usenow = True
1327 1329
1328 1330 timetuple = time.strptime(date, format)
1329 1331 localunixtime = int(calendar.timegm(timetuple))
1330 1332 if offset is None:
1331 1333 # local timezone
1332 1334 unixtime = int(time.mktime(timetuple))
1333 1335 offset = unixtime - localunixtime
1334 1336 else:
1335 1337 unixtime = localunixtime + offset
1336 1338 return unixtime, offset
1337 1339
1338 1340 def parsedate(date, formats=None, bias={}):
1339 1341 """parse a localized date/time and return a (unixtime, offset) tuple.
1340 1342
1341 1343 The date may be a "unixtime offset" string or in one of the specified
1342 1344 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1343 1345
1344 1346 >>> parsedate(' today ') == parsedate(\
1345 1347 datetime.date.today().strftime('%b %d'))
1346 1348 True
1347 1349 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1348 1350 datetime.timedelta(days=1)\
1349 1351 ).strftime('%b %d'))
1350 1352 True
1351 1353 >>> now, tz = makedate()
1352 1354 >>> strnow, strtz = parsedate('now')
1353 1355 >>> (strnow - now) < 1
1354 1356 True
1355 1357 >>> tz == strtz
1356 1358 True
1357 1359 """
1358 1360 if not date:
1359 1361 return 0, 0
1360 1362 if isinstance(date, tuple) and len(date) == 2:
1361 1363 return date
1362 1364 if not formats:
1363 1365 formats = defaultdateformats
1364 1366 date = date.strip()
1365 1367
1366 1368 if date == 'now' or date == _('now'):
1367 1369 return makedate()
1368 1370 if date == 'today' or date == _('today'):
1369 1371 date = datetime.date.today().strftime('%b %d')
1370 1372 elif date == 'yesterday' or date == _('yesterday'):
1371 1373 date = (datetime.date.today() -
1372 1374 datetime.timedelta(days=1)).strftime('%b %d')
1373 1375
1374 1376 try:
1375 1377 when, offset = map(int, date.split(' '))
1376 1378 except ValueError:
1377 1379 # fill out defaults
1378 1380 now = makedate()
1379 1381 defaults = {}
1380 1382 for part in ("d", "mb", "yY", "HI", "M", "S"):
1381 1383 # this piece is for rounding the specific end of unknowns
1382 1384 b = bias.get(part)
1383 1385 if b is None:
1384 1386 if part[0] in "HMS":
1385 1387 b = "00"
1386 1388 else:
1387 1389 b = "0"
1388 1390
1389 1391 # this piece is for matching the generic end to today's date
1390 1392 n = datestr(now, "%" + part[0])
1391 1393
1392 1394 defaults[part] = (b, n)
1393 1395
1394 1396 for format in formats:
1395 1397 try:
1396 1398 when, offset = strdate(date, format, defaults)
1397 1399 except (ValueError, OverflowError):
1398 1400 pass
1399 1401 else:
1400 1402 break
1401 1403 else:
1402 1404 raise Abort(_('invalid date: %r') % date)
1403 1405 # validate explicit (probably user-specified) date and
1404 1406 # time zone offset. values must fit in signed 32 bits for
1405 1407 # current 32-bit linux runtimes. timezones go from UTC-12
1406 1408 # to UTC+14
1407 1409 if abs(when) > 0x7fffffff:
1408 1410 raise Abort(_('date exceeds 32 bits: %d') % when)
1409 1411 if when < 0:
1410 1412 raise Abort(_('negative date value: %d') % when)
1411 1413 if offset < -50400 or offset > 43200:
1412 1414 raise Abort(_('impossible time zone offset: %d') % offset)
1413 1415 return when, offset
1414 1416
1415 1417 def matchdate(date):
1416 1418 """Return a function that matches a given date match specifier
1417 1419
1418 1420 Formats include:
1419 1421
1420 1422 '{date}' match a given date to the accuracy provided
1421 1423
1422 1424 '<{date}' on or before a given date
1423 1425
1424 1426 '>{date}' on or after a given date
1425 1427
1426 1428 >>> p1 = parsedate("10:29:59")
1427 1429 >>> p2 = parsedate("10:30:00")
1428 1430 >>> p3 = parsedate("10:30:59")
1429 1431 >>> p4 = parsedate("10:31:00")
1430 1432 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1431 1433 >>> f = matchdate("10:30")
1432 1434 >>> f(p1[0])
1433 1435 False
1434 1436 >>> f(p2[0])
1435 1437 True
1436 1438 >>> f(p3[0])
1437 1439 True
1438 1440 >>> f(p4[0])
1439 1441 False
1440 1442 >>> f(p5[0])
1441 1443 False
1442 1444 """
1443 1445
1444 1446 def lower(date):
1445 1447 d = {'mb': "1", 'd': "1"}
1446 1448 return parsedate(date, extendeddateformats, d)[0]
1447 1449
1448 1450 def upper(date):
1449 1451 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1450 1452 for days in ("31", "30", "29"):
1451 1453 try:
1452 1454 d["d"] = days
1453 1455 return parsedate(date, extendeddateformats, d)[0]
1454 1456 except Abort:
1455 1457 pass
1456 1458 d["d"] = "28"
1457 1459 return parsedate(date, extendeddateformats, d)[0]
1458 1460
1459 1461 date = date.strip()
1460 1462
1461 1463 if not date:
1462 1464 raise Abort(_("dates cannot consist entirely of whitespace"))
1463 1465 elif date[0] == "<":
1464 1466 if not date[1:]:
1465 1467 raise Abort(_("invalid day spec, use '<DATE'"))
1466 1468 when = upper(date[1:])
1467 1469 return lambda x: x <= when
1468 1470 elif date[0] == ">":
1469 1471 if not date[1:]:
1470 1472 raise Abort(_("invalid day spec, use '>DATE'"))
1471 1473 when = lower(date[1:])
1472 1474 return lambda x: x >= when
1473 1475 elif date[0] == "-":
1474 1476 try:
1475 1477 days = int(date[1:])
1476 1478 except ValueError:
1477 1479 raise Abort(_("invalid day spec: %s") % date[1:])
1478 1480 if days < 0:
1479 1481 raise Abort(_('%s must be nonnegative (see "hg help dates")')
1480 1482 % date[1:])
1481 1483 when = makedate()[0] - days * 3600 * 24
1482 1484 return lambda x: x >= when
1483 1485 elif " to " in date:
1484 1486 a, b = date.split(" to ")
1485 1487 start, stop = lower(a), upper(b)
1486 1488 return lambda x: x >= start and x <= stop
1487 1489 else:
1488 1490 start, stop = lower(date), upper(date)
1489 1491 return lambda x: x >= start and x <= stop
1490 1492
1491 1493 def shortuser(user):
1492 1494 """Return a short representation of a user name or email address."""
1493 1495 f = user.find('@')
1494 1496 if f >= 0:
1495 1497 user = user[:f]
1496 1498 f = user.find('<')
1497 1499 if f >= 0:
1498 1500 user = user[f + 1:]
1499 1501 f = user.find(' ')
1500 1502 if f >= 0:
1501 1503 user = user[:f]
1502 1504 f = user.find('.')
1503 1505 if f >= 0:
1504 1506 user = user[:f]
1505 1507 return user
1506 1508
1507 1509 def emailuser(user):
1508 1510 """Return the user portion of an email address."""
1509 1511 f = user.find('@')
1510 1512 if f >= 0:
1511 1513 user = user[:f]
1512 1514 f = user.find('<')
1513 1515 if f >= 0:
1514 1516 user = user[f + 1:]
1515 1517 return user
1516 1518
1517 1519 def email(author):
1518 1520 '''get email of author.'''
1519 1521 r = author.find('>')
1520 1522 if r == -1:
1521 1523 r = None
1522 1524 return author[author.find('<') + 1:r]
1523 1525
1524 1526 def ellipsis(text, maxlength=400):
1525 1527 """Trim string to at most maxlength (default: 400) columns in display."""
1526 1528 return encoding.trim(text, maxlength, ellipsis='...')
1527 1529
1528 1530 def unitcountfn(*unittable):
1529 1531 '''return a function that renders a readable count of some quantity'''
1530 1532
1531 1533 def go(count):
1532 1534 for multiplier, divisor, format in unittable:
1533 1535 if count >= divisor * multiplier:
1534 1536 return format % (count / float(divisor))
1535 1537 return unittable[-1][2] % count
1536 1538
1537 1539 return go
1538 1540
1539 1541 bytecount = unitcountfn(
1540 1542 (100, 1 << 30, _('%.0f GB')),
1541 1543 (10, 1 << 30, _('%.1f GB')),
1542 1544 (1, 1 << 30, _('%.2f GB')),
1543 1545 (100, 1 << 20, _('%.0f MB')),
1544 1546 (10, 1 << 20, _('%.1f MB')),
1545 1547 (1, 1 << 20, _('%.2f MB')),
1546 1548 (100, 1 << 10, _('%.0f KB')),
1547 1549 (10, 1 << 10, _('%.1f KB')),
1548 1550 (1, 1 << 10, _('%.2f KB')),
1549 1551 (1, 1, _('%.0f bytes')),
1550 1552 )
1551 1553
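# Illustrative values for bytecount (not from the diff): the first matching
# row of the table above picks the unit and precision.
#
#   bytecount(123)        # -> '123 bytes'
#   bytecount(1 << 20)    # -> '1.00 MB'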
1552 1554 def uirepr(s):
1553 1555 # Avoid double backslash in Windows path repr()
1554 1556 return repr(s).replace('\\\\', '\\')
1555 1557
1556 1558 # delay import of textwrap
1557 1559 def MBTextWrapper(**kwargs):
1558 1560 class tw(textwrap.TextWrapper):
1559 1561 """
1560 1562 Extend TextWrapper for width-awareness.
1561 1563
1562 1564 Neither the number of 'bytes' in any encoding nor the number of
1563 1565 'characters' is appropriate for calculating terminal columns of a given string.
1564 1566
1565 1567 The original TextWrapper implementation uses the built-in 'len()' directly,
1566 1568 so overriding is needed to use the width information of each character.
1567 1569
1568 1570 In addition, characters classified into 'ambiguous' width are
1569 1571 treated as wide in East Asian locales, but as narrow elsewhere.
1570 1572
1571 1573 This requires the user's decision to determine the width of such characters.
1572 1574 """
1573 1575 def __init__(self, **kwargs):
1574 1576 textwrap.TextWrapper.__init__(self, **kwargs)
1575 1577
1576 1578 # for compatibility between 2.4 and 2.6
1577 1579 if getattr(self, 'drop_whitespace', None) is None:
1578 1580 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1579 1581
1580 1582 def _cutdown(self, ucstr, space_left):
1581 1583 l = 0
1582 1584 colwidth = encoding.ucolwidth
1583 1585 for i in xrange(len(ucstr)):
1584 1586 l += colwidth(ucstr[i])
1585 1587 if space_left < l:
1586 1588 return (ucstr[:i], ucstr[i:])
1587 1589 return ucstr, ''
1588 1590
1589 1591 # overriding of base class
1590 1592 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1591 1593 space_left = max(width - cur_len, 1)
1592 1594
1593 1595 if self.break_long_words:
1594 1596 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1595 1597 cur_line.append(cut)
1596 1598 reversed_chunks[-1] = res
1597 1599 elif not cur_line:
1598 1600 cur_line.append(reversed_chunks.pop())
1599 1601
1600 1602 # this overriding code is imported from TextWrapper of python 2.6
1601 1603 # to calculate columns of string by 'encoding.ucolwidth()'
1602 1604 def _wrap_chunks(self, chunks):
1603 1605 colwidth = encoding.ucolwidth
1604 1606
1605 1607 lines = []
1606 1608 if self.width <= 0:
1607 1609 raise ValueError("invalid width %r (must be > 0)" % self.width)
1608 1610
1609 1611 # Arrange in reverse order so items can be efficiently popped
1610 1612 # from a stack of chunks.
1611 1613 chunks.reverse()
1612 1614
1613 1615 while chunks:
1614 1616
1615 1617 # Start the list of chunks that will make up the current line.
1616 1618 # cur_len is just the length of all the chunks in cur_line.
1617 1619 cur_line = []
1618 1620 cur_len = 0
1619 1621
1620 1622 # Figure out which static string will prefix this line.
1621 1623 if lines:
1622 1624 indent = self.subsequent_indent
1623 1625 else:
1624 1626 indent = self.initial_indent
1625 1627
1626 1628 # Maximum width for this line.
1627 1629 width = self.width - len(indent)
1628 1630
1629 1631 # First chunk on line is whitespace -- drop it, unless this
1630 1632 # is the very beginning of the text (i.e. no lines started yet).
1631 1633 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1632 1634 del chunks[-1]
1633 1635
1634 1636 while chunks:
1635 1637 l = colwidth(chunks[-1])
1636 1638
1637 1639 # Can at least squeeze this chunk onto the current line.
1638 1640 if cur_len + l <= width:
1639 1641 cur_line.append(chunks.pop())
1640 1642 cur_len += l
1641 1643
1642 1644 # Nope, this line is full.
1643 1645 else:
1644 1646 break
1645 1647
1646 1648 # The current line is full, and the next chunk is too big to
1647 1649 # fit on *any* line (not just this one).
1648 1650 if chunks and colwidth(chunks[-1]) > width:
1649 1651 self._handle_long_word(chunks, cur_line, cur_len, width)
1650 1652
1651 1653 # If the last chunk on this line is all whitespace, drop it.
1652 1654 if (self.drop_whitespace and
1653 1655 cur_line and cur_line[-1].strip() == ''):
1654 1656 del cur_line[-1]
1655 1657
1656 1658 # Convert current line back to a string and store it in list
1657 1659 # of all lines (return value).
1658 1660 if cur_line:
1659 1661 lines.append(indent + ''.join(cur_line))
1660 1662
1661 1663 return lines
1662 1664
1663 1665 global MBTextWrapper
1664 1666 MBTextWrapper = tw
1665 1667 return tw(**kwargs)
1666 1668
1667 1669 def wrap(line, width, initindent='', hangindent=''):
1668 1670 maxindent = max(len(hangindent), len(initindent))
1669 1671 if width <= maxindent:
1670 1672 # adjust for weird terminal size
1671 1673 width = max(78, maxindent + 1)
1672 1674 line = line.decode(encoding.encoding, encoding.encodingmode)
1673 1675 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1674 1676 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1675 1677 wrapper = MBTextWrapper(width=width,
1676 1678 initial_indent=initindent,
1677 1679 subsequent_indent=hangindent)
1678 1680 return wrapper.fill(line).encode(encoding.encoding)
1679 1681
1680 1682 def iterlines(iterator):
1681 1683 for chunk in iterator:
1682 1684 for line in chunk.splitlines():
1683 1685 yield line
1684 1686
1685 1687 def expandpath(path):
1686 1688 return os.path.expanduser(os.path.expandvars(path))
1687 1689
1688 1690 def hgcmd():
1689 1691 """Return the command used to execute current hg
1690 1692
1691 1693 This is different from hgexecutable() because on Windows we want
1692 1694 to avoid things opening new shell windows like batch files, so we
1693 1695 get either the python call or current executable.
1694 1696 """
1695 1697 if mainfrozen():
1696 1698 return [sys.executable]
1697 1699 return gethgcmd()
1698 1700
1699 1701 def rundetached(args, condfn):
1700 1702 """Execute the argument list in a detached process.
1701 1703
1702 1704 condfn is a callable which is called repeatedly and should return
1703 1705 True once the child process is known to have started successfully.
1704 1706 At this point, the child process PID is returned. If the child
1705 1707 process fails to start or finishes before condfn() evaluates to
1706 1708 True, return -1.
1707 1709 """
1708 1710 # Windows case is easier because the child process is either
1709 1711 # successfully starting and validating the condition or exiting
1710 1712 # on failure. We just poll on its PID. On Unix, if the child
1711 1713 # process fails to start, it will be left in a zombie state until
1712 1714 # the parent wait on it, which we cannot do since we expect a long
1713 1715 # running process on success. Instead we listen for SIGCHLD telling
1714 1716 # us our child process terminated.
1715 1717 terminated = set()
1716 1718 def handler(signum, frame):
1717 1719 terminated.add(os.wait())
1718 1720 prevhandler = None
1719 1721 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1720 1722 if SIGCHLD is not None:
1721 1723 prevhandler = signal.signal(SIGCHLD, handler)
1722 1724 try:
1723 1725 pid = spawndetached(args)
1724 1726 while not condfn():
1725 1727 if ((pid in terminated or not testpid(pid))
1726 1728 and not condfn()):
1727 1729 return -1
1728 1730 time.sleep(0.1)
1729 1731 return pid
1730 1732 finally:
1731 1733 if prevhandler is not None:
1732 1734 signal.signal(signal.SIGCHLD, prevhandler)
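
# Illustrative sketch (hypothetical caller, not part of this module): a
# typical condfn polls for something the child is expected to create, e.g. a
# pid file written by a detached 'hg serve':
#
#   pidfile = os.path.join(tempfile.gettempdir(), 'hgserve.pid')
#   pid = rundetached(['hg', 'serve', '-d', '--pid-file', pidfile],
#                     lambda: os.path.exists(pidfile))
#   if pid < 0:
#       raise Abort(_('server failed to start'))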
1733 1735
1734 1736 try:
1735 1737 any, all = any, all
1736 1738 except NameError:
1737 1739 def any(iterable):
1738 1740 for i in iterable:
1739 1741 if i:
1740 1742 return True
1741 1743 return False
1742 1744
1743 1745 def all(iterable):
1744 1746 for i in iterable:
1745 1747 if not i:
1746 1748 return False
1747 1749 return True
1748 1750
1749 1751 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1750 1752 """Return the result of interpolating items in the mapping into string s.
1751 1753
1752 1754 prefix is a single character string, or a two character string with
1753 1755 a backslash as the first character if the prefix needs to be escaped in
1754 1756 a regular expression.
1755 1757
1756 1758 fn is an optional function that will be applied to the replacement text
1757 1759 just before replacement.
1758 1760
1759 1761 escape_prefix is an optional flag that allows using doubled prefix for
1760 1762 its escaping.
1761 1763 """
1762 1764 fn = fn or (lambda s: s)
1763 1765 patterns = '|'.join(mapping.keys())
1764 1766 if escape_prefix:
1765 1767 patterns += '|' + prefix
1766 1768 if len(prefix) > 1:
1767 1769 prefix_char = prefix[1:]
1768 1770 else:
1769 1771 prefix_char = prefix
1770 1772 mapping[prefix_char] = prefix_char
1771 1773 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1772 1774 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
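
# Illustrative sketch (not part of this module, made-up mapping): simple
# '%'-style substitution, plus escaping the prefix by doubling it.
#
#   >>> interpolate('%', {'foo': 'bar'}, 'say %foo')
#   'say bar'
#   >>> interpolate(r'\$', {'foo': 'bar'}, 'cost: $foo, literal: $$',
#   ...             escape_prefix=True)
#   'cost: bar, literal: $'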
1773 1775
1774 1776 def getport(port):
1775 1777 """Return the port for a given network service.
1776 1778
1777 1779 If port is an integer, it's returned as is. If it's a string, it's
1778 1780 looked up using socket.getservbyname(). If there's no matching
1779 1781 service, util.Abort is raised.
1780 1782 """
1781 1783 try:
1782 1784 return int(port)
1783 1785 except ValueError:
1784 1786 pass
1785 1787
1786 1788 try:
1787 1789 return socket.getservbyname(port)
1788 1790 except socket.error:
1789 1791 raise Abort(_("no port number associated with service '%s'") % port)
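
# Illustrative sketch (not part of this module): integers and numeric strings
# pass through unchanged, service names go through socket.getservbyname();
# the 'http' -> 80 result assumes a conventional services database.
#
#   >>> getport(8000)
#   8000
#   >>> getport('http')
#   80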
1790 1792
1791 1793 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1792 1794 '0': False, 'no': False, 'false': False, 'off': False,
1793 1795 'never': False}
1794 1796
1795 1797 def parsebool(s):
1796 1798 """Parse s into a boolean.
1797 1799
1798 1800 If s is not a valid boolean, returns None.
1799 1801 """
1800 1802 return _booleans.get(s.lower(), None)
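
# Illustrative sketch (not part of this module): the accepted spellings are
# exactly the keys of the _booleans table above, matched case-insensitively.
#
#   >>> parsebool('on'), parsebool('Never'), parsebool('maybe')
#   (True, False, None)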
1801 1803
1802 1804 _hexdig = '0123456789ABCDEFabcdef'
1803 1805 _hextochr = dict((a + b, chr(int(a + b, 16)))
1804 1806 for a in _hexdig for b in _hexdig)
1805 1807
1806 1808 def _urlunquote(s):
1807 1809 """Decode HTTP/HTML % encoding.
1808 1810
1809 1811 >>> _urlunquote('abc%20def')
1810 1812 'abc def'
1811 1813 """
1812 1814 res = s.split('%')
1813 1815 # fastpath
1814 1816 if len(res) == 1:
1815 1817 return s
1816 1818 s = res[0]
1817 1819 for item in res[1:]:
1818 1820 try:
1819 1821 s += _hextochr[item[:2]] + item[2:]
1820 1822 except KeyError:
1821 1823 s += '%' + item
1822 1824 except UnicodeDecodeError:
1823 1825 s += unichr(int(item[:2], 16)) + item[2:]
1824 1826 return s
1825 1827
1826 1828 class url(object):
1827 1829 r"""Reliable URL parser.
1828 1830
1829 1831 This parses URLs and provides attributes for the following
1830 1832 components:
1831 1833
1832 1834 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1833 1835
1834 1836 Missing components are set to None. The only exception is
1835 1837 fragment, which is set to '' if present but empty.
1836 1838
1837 1839 If parsefragment is False, fragment is included in query. If
1838 1840 parsequery is False, query is included in path. If both are
1839 1841 False, both fragment and query are included in path.
1840 1842
1841 1843 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1842 1844
1843 1845 Note that for backward compatibility reasons, bundle URLs do not
1844 1846 take host names. That means 'bundle://../' has a path of '../'.
1845 1847
1846 1848 Examples:
1847 1849
1848 1850 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1849 1851 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1850 1852 >>> url('ssh://[::1]:2200//home/joe/repo')
1851 1853 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1852 1854 >>> url('file:///home/joe/repo')
1853 1855 <url scheme: 'file', path: '/home/joe/repo'>
1854 1856 >>> url('file:///c:/temp/foo/')
1855 1857 <url scheme: 'file', path: 'c:/temp/foo/'>
1856 1858 >>> url('bundle:foo')
1857 1859 <url scheme: 'bundle', path: 'foo'>
1858 1860 >>> url('bundle://../foo')
1859 1861 <url scheme: 'bundle', path: '../foo'>
1860 1862 >>> url(r'c:\foo\bar')
1861 1863 <url path: 'c:\\foo\\bar'>
1862 1864 >>> url(r'\\blah\blah\blah')
1863 1865 <url path: '\\\\blah\\blah\\blah'>
1864 1866 >>> url(r'\\blah\blah\blah#baz')
1865 1867 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1866 1868 >>> url(r'file:///C:\users\me')
1867 1869 <url scheme: 'file', path: 'C:\\users\\me'>
1868 1870
1869 1871 Authentication credentials:
1870 1872
1871 1873 >>> url('ssh://joe:xyz@x/repo')
1872 1874 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1873 1875 >>> url('ssh://joe@x/repo')
1874 1876 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1875 1877
1876 1878 Query strings and fragments:
1877 1879
1878 1880 >>> url('http://host/a?b#c')
1879 1881 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1880 1882 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1881 1883 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1882 1884 """
1883 1885
1884 1886 _safechars = "!~*'()+"
1885 1887 _safepchars = "/!~*'()+:\\"
1886 1888 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
1887 1889
1888 1890 def __init__(self, path, parsequery=True, parsefragment=True):
1889 1891 # We slowly chomp away at path until we have only the path left
1890 1892 self.scheme = self.user = self.passwd = self.host = None
1891 1893 self.port = self.path = self.query = self.fragment = None
1892 1894 self._localpath = True
1893 1895 self._hostport = ''
1894 1896 self._origpath = path
1895 1897
1896 1898 if parsefragment and '#' in path:
1897 1899 path, self.fragment = path.split('#', 1)
1898 1900 if not path:
1899 1901 path = None
1900 1902
1901 1903 # special case for Windows drive letters and UNC paths
1902 1904 if hasdriveletter(path) or path.startswith(r'\\'):
1903 1905 self.path = path
1904 1906 return
1905 1907
1906 1908 # For compatibility reasons, we can't handle bundle paths as
1907 1909 # normal URLs
1908 1910 if path.startswith('bundle:'):
1909 1911 self.scheme = 'bundle'
1910 1912 path = path[7:]
1911 1913 if path.startswith('//'):
1912 1914 path = path[2:]
1913 1915 self.path = path
1914 1916 return
1915 1917
1916 1918 if self._matchscheme(path):
1917 1919 parts = path.split(':', 1)
1918 1920 if parts[0]:
1919 1921 self.scheme, path = parts
1920 1922 self._localpath = False
1921 1923
1922 1924 if not path:
1923 1925 path = None
1924 1926 if self._localpath:
1925 1927 self.path = ''
1926 1928 return
1927 1929 else:
1928 1930 if self._localpath:
1929 1931 self.path = path
1930 1932 return
1931 1933
1932 1934 if parsequery and '?' in path:
1933 1935 path, self.query = path.split('?', 1)
1934 1936 if not path:
1935 1937 path = None
1936 1938 if not self.query:
1937 1939 self.query = None
1938 1940
1939 1941 # // is required to specify a host/authority
1940 1942 if path and path.startswith('//'):
1941 1943 parts = path[2:].split('/', 1)
1942 1944 if len(parts) > 1:
1943 1945 self.host, path = parts
1944 1946 else:
1945 1947 self.host = parts[0]
1946 1948 path = None
1947 1949 if not self.host:
1948 1950 self.host = None
1949 1951 # path of file:///d is /d
1950 1952 # path of file:///d:/ is d:/, not /d:/
1951 1953 if path and not hasdriveletter(path):
1952 1954 path = '/' + path
1953 1955
1954 1956 if self.host and '@' in self.host:
1955 1957 self.user, self.host = self.host.rsplit('@', 1)
1956 1958 if ':' in self.user:
1957 1959 self.user, self.passwd = self.user.split(':', 1)
1958 1960 if not self.host:
1959 1961 self.host = None
1960 1962
1961 1963 # Don't split on colons in IPv6 addresses without ports
1962 1964 if (self.host and ':' in self.host and
1963 1965 not (self.host.startswith('[') and self.host.endswith(']'))):
1964 1966 self._hostport = self.host
1965 1967 self.host, self.port = self.host.rsplit(':', 1)
1966 1968 if not self.host:
1967 1969 self.host = None
1968 1970
1969 1971 if (self.host and self.scheme == 'file' and
1970 1972 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1971 1973 raise Abort(_('file:// URLs can only refer to localhost'))
1972 1974
1973 1975 self.path = path
1974 1976
1975 1977 # leave the query string escaped
1976 1978 for a in ('user', 'passwd', 'host', 'port',
1977 1979 'path', 'fragment'):
1978 1980 v = getattr(self, a)
1979 1981 if v is not None:
1980 1982 setattr(self, a, _urlunquote(v))
1981 1983
1982 1984 def __repr__(self):
1983 1985 attrs = []
1984 1986 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1985 1987 'query', 'fragment'):
1986 1988 v = getattr(self, a)
1987 1989 if v is not None:
1988 1990 attrs.append('%s: %r' % (a, v))
1989 1991 return '<url %s>' % ', '.join(attrs)
1990 1992
1991 1993 def __str__(self):
1992 1994 r"""Join the URL's components back into a URL string.
1993 1995
1994 1996 Examples:
1995 1997
1996 1998 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1997 1999 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1998 2000 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1999 2001 'http://user:pw@host:80/?foo=bar&baz=42'
2000 2002 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2001 2003 'http://user:pw@host:80/?foo=bar%3dbaz'
2002 2004 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2003 2005 'ssh://user:pw@[::1]:2200//home/joe#'
2004 2006 >>> str(url('http://localhost:80//'))
2005 2007 'http://localhost:80//'
2006 2008 >>> str(url('http://localhost:80/'))
2007 2009 'http://localhost:80/'
2008 2010 >>> str(url('http://localhost:80'))
2009 2011 'http://localhost:80/'
2010 2012 >>> str(url('bundle:foo'))
2011 2013 'bundle:foo'
2012 2014 >>> str(url('bundle://../foo'))
2013 2015 'bundle:../foo'
2014 2016 >>> str(url('path'))
2015 2017 'path'
2016 2018 >>> str(url('file:///tmp/foo/bar'))
2017 2019 'file:///tmp/foo/bar'
2018 2020 >>> str(url('file:///c:/tmp/foo/bar'))
2019 2021 'file:///c:/tmp/foo/bar'
2020 2022 >>> print url(r'bundle:foo\bar')
2021 2023 bundle:foo\bar
2022 2024 >>> print url(r'file:///D:\data\hg')
2023 2025 file:///D:\data\hg
2024 2026 """
2025 2027 if self._localpath:
2026 2028 s = self.path
2027 2029 if self.scheme == 'bundle':
2028 2030 s = 'bundle:' + s
2029 2031 if self.fragment:
2030 2032 s += '#' + self.fragment
2031 2033 return s
2032 2034
2033 2035 s = self.scheme + ':'
2034 2036 if self.user or self.passwd or self.host:
2035 2037 s += '//'
2036 2038 elif self.scheme and (not self.path or self.path.startswith('/')
2037 2039 or hasdriveletter(self.path)):
2038 2040 s += '//'
2039 2041 if hasdriveletter(self.path):
2040 2042 s += '/'
2041 2043 if self.user:
2042 2044 s += urllib.quote(self.user, safe=self._safechars)
2043 2045 if self.passwd:
2044 2046 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2045 2047 if self.user or self.passwd:
2046 2048 s += '@'
2047 2049 if self.host:
2048 2050 if not (self.host.startswith('[') and self.host.endswith(']')):
2049 2051 s += urllib.quote(self.host)
2050 2052 else:
2051 2053 s += self.host
2052 2054 if self.port:
2053 2055 s += ':' + urllib.quote(self.port)
2054 2056 if self.host:
2055 2057 s += '/'
2056 2058 if self.path:
2057 2059 # TODO: similar to the query string, we should not unescape the
2058 2060 # path when we store it; the path might contain '%2f' = '/',
2059 2061 # which we should *not* escape.
2060 2062 s += urllib.quote(self.path, safe=self._safepchars)
2061 2063 if self.query:
2062 2064 # we store the query in escaped form.
2063 2065 s += '?' + self.query
2064 2066 if self.fragment is not None:
2065 2067 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2066 2068 return s
2067 2069
2068 2070 def authinfo(self):
2069 2071 user, passwd = self.user, self.passwd
2070 2072 try:
2071 2073 self.user, self.passwd = None, None
2072 2074 s = str(self)
2073 2075 finally:
2074 2076 self.user, self.passwd = user, passwd
2075 2077 if not self.user:
2076 2078 return (s, None)
2077 2079 # authinfo[1] is passed to the urllib2 password manager, and its
2078 2080 # URIs must not contain credentials. The host is passed in the
2079 2081 # URIs list because Python < 2.4.3 uses only that to search for
2080 2082 # a password.
2081 2083 return (s, (None, (s, self.host),
2082 2084 self.user, self.passwd or ''))
2083 2085
2084 2086 def isabs(self):
2085 2087 if self.scheme and self.scheme != 'file':
2086 2088 return True # remote URL
2087 2089 if hasdriveletter(self.path):
2088 2090 return True # absolute for our purposes - can't be joined()
2089 2091 if self.path.startswith(r'\\'):
2090 2092 return True # Windows UNC path
2091 2093 if self.path.startswith('/'):
2092 2094 return True # POSIX-style
2093 2095 return False
2094 2096
2095 2097 def localpath(self):
2096 2098 if self.scheme == 'file' or self.scheme == 'bundle':
2097 2099 path = self.path or '/'
2098 2100 # For Windows, we need to promote hosts containing drive
2099 2101 # letters to paths with drive letters.
2100 2102 if hasdriveletter(self._hostport):
2101 2103 path = self._hostport + '/' + self.path
2102 2104 elif (self.host is not None and self.path
2103 2105 and not hasdriveletter(path)):
2104 2106 path = '/' + path
2105 2107 return path
2106 2108 return self._origpath
2107 2109
2108 2110 def islocal(self):
2109 2111 '''whether localpath will return something that posixfile can open'''
2110 2112 return (not self.scheme or self.scheme == 'file'
2111 2113 or self.scheme == 'bundle')
2112 2114
2113 2115 def hasscheme(path):
2114 2116 return bool(url(path).scheme)
2115 2117
2116 2118 def hasdriveletter(path):
2117 2119 return path and path[1:2] == ':' and path[0:1].isalpha()
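
# Illustrative sketch (not part of this module): only the first two
# characters of the path are examined.
#
#   >>> hasdriveletter('c:\\temp\\foo')
#   True
#   >>> hasdriveletter('/tmp/foo')
#   False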
2118 2120
2119 2121 def urllocalpath(path):
2120 2122 return url(path, parsequery=False, parsefragment=False).localpath()
2121 2123
2122 2124 def hidepassword(u):
2123 2125 '''hide user credential in a url string'''
2124 2126 u = url(u)
2125 2127 if u.passwd:
2126 2128 u.passwd = '***'
2127 2129 return str(u)
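
# Illustrative sketch (not part of this module), reusing the credentials from
# the url doctests above:
#
#   >>> hidepassword('ssh://joe:xyz@x/repo')
#   'ssh://joe:***@x/repo'
#   >>> hidepassword('ssh://joe@x/repo')
#   'ssh://joe@x/repo'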
2128 2130
2129 2131 def removeauth(u):
2130 2132 '''remove all authentication information from a url string'''
2131 2133 u = url(u)
2132 2134 u.user = u.passwd = None
2133 2135 return str(u)
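
# Illustrative sketch (not part of this module): both the user and the
# password components are dropped.
#
#   >>> removeauth('ssh://joe:xyz@x/repo')
#   'ssh://x/repo'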
2134 2136
2135 2137 def isatty(fd):
2136 2138 try:
2137 2139 return fd.isatty()
2138 2140 except AttributeError:
2139 2141 return False
2140 2142
2141 2143 timecount = unitcountfn(
2142 2144 (1, 1e3, _('%.0f s')),
2143 2145 (100, 1, _('%.1f s')),
2144 2146 (10, 1, _('%.2f s')),
2145 2147 (1, 1, _('%.3f s')),
2146 2148 (100, 0.001, _('%.1f ms')),
2147 2149 (10, 0.001, _('%.2f ms')),
2148 2150 (1, 0.001, _('%.3f ms')),
2149 2151 (100, 0.000001, _('%.1f us')),
2150 2152 (10, 0.000001, _('%.2f us')),
2151 2153 (1, 0.000001, _('%.3f us')),
2152 2154 (100, 0.000000001, _('%.1f ns')),
2153 2155 (10, 0.000000001, _('%.2f ns')),
2154 2156 (1, 0.000000001, _('%.3f ns')),
2155 2157 )
2156 2158
2157 2159 _timenesting = [0]
2158 2160
2159 2161 def timed(func):
2160 2162 '''Report the execution time of a function call to stderr.
2161 2163
2162 2164 During development, use as a decorator when you need to measure
2163 2165 the cost of a function, e.g. as follows:
2164 2166
2165 2167 @util.timed
2166 2168 def foo(a, b, c):
2167 2169 pass
2168 2170 '''
2169 2171
2170 2172 def wrapper(*args, **kwargs):
2171 2173 start = time.time()
2172 2174 indent = 2
2173 2175 _timenesting[0] += indent
2174 2176 try:
2175 2177 return func(*args, **kwargs)
2176 2178 finally:
2177 2179 elapsed = time.time() - start
2178 2180 _timenesting[0] -= indent
2179 2181 sys.stderr.write('%s%s: %s\n' %
2180 2182 (' ' * _timenesting[0], func.__name__,
2181 2183 timecount(elapsed)))
2182 2184 return wrapper
2183 2185
2184 2186 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2185 2187 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2186 2188
2187 2189 def sizetoint(s):
2188 2190 '''Convert a space specifier to a byte count.
2189 2191
2190 2192 >>> sizetoint('30')
2191 2193 30
2192 2194 >>> sizetoint('2.2kb')
2193 2195 2252
2194 2196 >>> sizetoint('6M')
2195 2197 6291456
2196 2198 '''
2197 2199 t = s.strip().lower()
2198 2200 try:
2199 2201 for k, u in _sizeunits:
2200 2202 if t.endswith(k):
2201 2203 return int(float(t[:-len(k)]) * u)
2202 2204 return int(t)
2203 2205 except ValueError:
2204 2206 raise error.ParseError(_("couldn't parse size: %s") % s)
2205 2207
2206 2208 class hooks(object):
2207 2209 '''A collection of hook functions that can be used to extend a
2208 2210 function's behaviour. Hooks are called in lexicographic order,
2209 2211 based on the names of their sources.'''
2210 2212
2211 2213 def __init__(self):
2212 2214 self._hooks = []
2213 2215
2214 2216 def add(self, source, hook):
2215 2217 self._hooks.append((source, hook))
2216 2218
2217 2219 def __call__(self, *args):
2218 2220 self._hooks.sort(key=lambda x: x[0])
2219 2221 results = []
2220 2222 for source, hook in self._hooks:
2221 2223 results.append(hook(*args))
2222 2224 return results
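
# Illustrative sketch (hypothetical hook functions, not part of this module):
# hooks run in lexicographic order of their source names, not in the order
# they were added.
#
#   >>> h = hooks()
#   >>> h.add('zzz-ext', lambda x: x + 1)
#   >>> h.add('aaa-ext', lambda x: x * 2)
#   >>> h(3)
#   [6, 4]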
2223 2225
2224 2226 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2225 2227 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2226 2228 Skips the last 'skip' entries. By default it will flush stdout first.
2227 2229 It can be used everywhere and intentionally does not require a ui object.
2228 2230 Not meant to be used in production code but very convenient while developing.
2229 2231 '''
2230 2232 if otherf:
2231 2233 otherf.flush()
2232 2234 f.write('%s at:\n' % msg)
2233 2235 entries = [('%s:%s' % (fn, ln), func)
2234 2236 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2235 2237 if entries:
2236 2238 fnmax = max(len(entry[0]) for entry in entries)
2237 2239 for fnln, func in entries:
2238 2240 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2239 2241 f.flush()
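
# Illustrative sketch (not part of this module): a throwaway call while
# debugging; the frames printed depend entirely on the call site.
#
#   from mercurial import util
#   util.debugstacktrace('reached the suspicious branch')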
2240 2242
2241 2243 # convenient shortcut
2242 2244 dst = debugstacktrace