util: kill Python 2.4 deque.remove hack
Adrian Buehlmann
r25112:3d14c121 default
@@ -1,2288 +1,2278 @@
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2 or any later version.
9 9
10 10 """Mercurial utility functions and platform specific implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 import i18n
17 17 _ = i18n._
18 18 import error, osutil, encoding, parsers
19 19 import errno, shutil, sys, tempfile, traceback
20 20 import re as remod
21 21 import os, time, datetime, calendar, textwrap, signal, collections
22 22 import imp, socket, urllib, struct
23 23 import gc
24 24
25 25 if os.name == 'nt':
26 26 import windows as platform
27 27 else:
28 28 import posix as platform
29 29
30 30 cachestat = platform.cachestat
31 31 checkexec = platform.checkexec
32 32 checklink = platform.checklink
33 33 copymode = platform.copymode
34 34 executablepath = platform.executablepath
35 35 expandglobs = platform.expandglobs
36 36 explainexit = platform.explainexit
37 37 findexe = platform.findexe
38 38 gethgcmd = platform.gethgcmd
39 39 getuser = platform.getuser
40 40 groupmembers = platform.groupmembers
41 41 groupname = platform.groupname
42 42 hidewindow = platform.hidewindow
43 43 isexec = platform.isexec
44 44 isowner = platform.isowner
45 45 localpath = platform.localpath
46 46 lookupreg = platform.lookupreg
47 47 makedir = platform.makedir
48 48 nlinks = platform.nlinks
49 49 normpath = platform.normpath
50 50 normcase = platform.normcase
51 51 normcasespec = platform.normcasespec
52 52 normcasefallback = platform.normcasefallback
53 53 openhardlinks = platform.openhardlinks
54 54 oslink = platform.oslink
55 55 parsepatchoutput = platform.parsepatchoutput
56 56 pconvert = platform.pconvert
57 57 popen = platform.popen
58 58 posixfile = platform.posixfile
59 59 quotecommand = platform.quotecommand
60 60 readpipe = platform.readpipe
61 61 rename = platform.rename
62 62 removedirs = platform.removedirs
63 63 samedevice = platform.samedevice
64 64 samefile = platform.samefile
65 65 samestat = platform.samestat
66 66 setbinary = platform.setbinary
67 67 setflags = platform.setflags
68 68 setsignalhandler = platform.setsignalhandler
69 69 shellquote = platform.shellquote
70 70 spawndetached = platform.spawndetached
71 71 split = platform.split
72 72 sshargs = platform.sshargs
73 73 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
74 74 statisexec = platform.statisexec
75 75 statislink = platform.statislink
76 76 termwidth = platform.termwidth
77 77 testpid = platform.testpid
78 78 umask = platform.umask
79 79 unlink = platform.unlink
80 80 unlinkpath = platform.unlinkpath
81 81 username = platform.username
82 82
83 83 # Python compatibility
84 84
85 85 _notset = object()
86 86
87 87 def safehasattr(thing, attr):
88 88 return getattr(thing, attr, _notset) is not _notset
89 89
90 90 def sha1(s=''):
91 91 '''
92 92 Low-overhead wrapper around Python's SHA support
93 93
94 94 >>> f = _fastsha1
95 95 >>> a = sha1()
96 96 >>> a = f()
97 97 >>> a.hexdigest()
98 98 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
99 99 '''
100 100
101 101 return _fastsha1(s)
102 102
103 103 def _fastsha1(s=''):
104 104 # This function will import sha1 from hashlib or sha (whichever is
105 105 # available) and overwrite itself with it on the first call.
106 106 # Subsequent calls will go directly to the imported function.
107 107 if sys.version_info >= (2, 5):
108 108 from hashlib import sha1 as _sha1
109 109 else:
110 110 from sha import sha as _sha1
111 111 global _fastsha1, sha1
112 112 _fastsha1 = sha1 = _sha1
113 113 return _sha1(s)
114 114
115 115 def md5(s=''):
116 116 try:
117 117 from hashlib import md5 as _md5
118 118 except ImportError:
119 119 from md5 import md5 as _md5
120 120 global md5
121 121 md5 = _md5
122 122 return _md5(s)
123 123
124 124 DIGESTS = {
125 125 'md5': md5,
126 126 'sha1': sha1,
127 127 }
128 128 # List of digest types from strongest to weakest
129 129 DIGESTS_BY_STRENGTH = ['sha1', 'md5']
130 130
131 131 try:
132 132 import hashlib
133 133 DIGESTS.update({
134 134 'sha512': hashlib.sha512,
135 135 })
136 136 DIGESTS_BY_STRENGTH.insert(0, 'sha512')
137 137 except ImportError:
138 138 pass
139 139
140 140 for k in DIGESTS_BY_STRENGTH:
141 141 assert k in DIGESTS
142 142
143 143 class digester(object):
144 144 """helper to compute digests.
145 145
146 146 This helper can be used to compute one or more digests given their name.
147 147
148 148 >>> d = digester(['md5', 'sha1'])
149 149 >>> d.update('foo')
150 150 >>> [k for k in sorted(d)]
151 151 ['md5', 'sha1']
152 152 >>> d['md5']
153 153 'acbd18db4cc2f85cedef654fccc4a4d8'
154 154 >>> d['sha1']
155 155 '0beec7b5ea3f0fdbc95d0dd47f3c5bc275da8a33'
156 156 >>> digester.preferred(['md5', 'sha1'])
157 157 'sha1'
158 158 """
159 159
160 160 def __init__(self, digests, s=''):
161 161 self._hashes = {}
162 162 for k in digests:
163 163 if k not in DIGESTS:
164 164 raise Abort(_('unknown digest type: %s') % k)
165 165 self._hashes[k] = DIGESTS[k]()
166 166 if s:
167 167 self.update(s)
168 168
169 169 def update(self, data):
170 170 for h in self._hashes.values():
171 171 h.update(data)
172 172
173 173 def __getitem__(self, key):
174 174 if key not in DIGESTS:
175 175 raise Abort(_('unknown digest type: %s') % key)
176 176 return self._hashes[key].hexdigest()
177 177
178 178 def __iter__(self):
179 179 return iter(self._hashes)
180 180
181 181 @staticmethod
182 182 def preferred(supported):
183 183 """returns the strongest digest type in both supported and DIGESTS."""
184 184
185 185 for k in DIGESTS_BY_STRENGTH:
186 186 if k in supported:
187 187 return k
188 188 return None
189 189
190 190 class digestchecker(object):
191 191 """file handle wrapper that additionally checks content against a given
192 192 size and digests.
193 193
194 194 d = digestchecker(fh, size, {'md5': '...'})
195 195
196 196 When multiple digests are given, all of them are validated.
197 197 """
198 198
199 199 def __init__(self, fh, size, digests):
200 200 self._fh = fh
201 201 self._size = size
202 202 self._got = 0
203 203 self._digests = dict(digests)
204 204 self._digester = digester(self._digests.keys())
205 205
206 206 def read(self, length=-1):
207 207 content = self._fh.read(length)
208 208 self._digester.update(content)
209 209 self._got += len(content)
210 210 return content
211 211
212 212 def validate(self):
213 213 if self._size != self._got:
214 214 raise Abort(_('size mismatch: expected %d, got %d') %
215 215 (self._size, self._got))
216 216 for k, v in self._digests.items():
217 217 if v != self._digester[k]:
218 218 # i18n: first parameter is a digest name
219 219 raise Abort(_('%s mismatch: expected %s, got %s') %
220 220 (k, v, self._digester[k]))
221 221
222 222 try:
223 223 buffer = buffer
224 224 except NameError:
225 225 if sys.version_info[0] < 3:
226 226 def buffer(sliceable, offset=0):
227 227 return sliceable[offset:]
228 228 else:
229 229 def buffer(sliceable, offset=0):
230 230 return memoryview(sliceable)[offset:]
231 231
232 232 import subprocess
233 233 closefds = os.name == 'posix'
234 234
235 235 def unpacker(fmt):
236 236 """create a struct unpacker for the specified format"""
237 237 try:
238 238 # 2.5+
239 239 return struct.Struct(fmt).unpack
240 240 except AttributeError:
241 241 # 2.4
242 242 return lambda buf: struct.unpack(fmt, buf)
243 243
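# Usage sketch (illustrative, assuming a big-endian 32-bit payload):
#
#   >>> u = unpacker('>I')
#   >>> u('\x00\x00\x00\x2a')
#   (42,)
#
# struct.Struct is used when available (Python 2.5+); otherwise the
# lambda fallback calls struct.unpack() directly with the same result.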
244 244 def popen2(cmd, env=None, newlines=False):
245 245 # Setting bufsize to -1 lets the system decide the buffer size.
246 246 # The default for bufsize is 0, meaning unbuffered. This leads to
247 247 # poor performance on Mac OS X: http://bugs.python.org/issue4194
248 248 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
249 249 close_fds=closefds,
250 250 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
251 251 universal_newlines=newlines,
252 252 env=env)
253 253 return p.stdin, p.stdout
254 254
255 255 def popen3(cmd, env=None, newlines=False):
256 256 stdin, stdout, stderr, p = popen4(cmd, env, newlines)
257 257 return stdin, stdout, stderr
258 258
259 259 def popen4(cmd, env=None, newlines=False):
260 260 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
261 261 close_fds=closefds,
262 262 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
263 263 stderr=subprocess.PIPE,
264 264 universal_newlines=newlines,
265 265 env=env)
266 266 return p.stdin, p.stdout, p.stderr, p
267 267
268 268 def version():
269 269 """Return version information if available."""
270 270 try:
271 271 import __version__
272 272 return __version__.version
273 273 except ImportError:
274 274 return 'unknown'
275 275
276 276 # used by parsedate
277 277 defaultdateformats = (
278 278 '%Y-%m-%d %H:%M:%S',
279 279 '%Y-%m-%d %I:%M:%S%p',
280 280 '%Y-%m-%d %H:%M',
281 281 '%Y-%m-%d %I:%M%p',
282 282 '%Y-%m-%d',
283 283 '%m-%d',
284 284 '%m/%d',
285 285 '%m/%d/%y',
286 286 '%m/%d/%Y',
287 287 '%a %b %d %H:%M:%S %Y',
288 288 '%a %b %d %I:%M:%S%p %Y',
289 289 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
290 290 '%b %d %H:%M:%S %Y',
291 291 '%b %d %I:%M:%S%p %Y',
292 292 '%b %d %H:%M:%S',
293 293 '%b %d %I:%M:%S%p',
294 294 '%b %d %H:%M',
295 295 '%b %d %I:%M%p',
296 296 '%b %d %Y',
297 297 '%b %d',
298 298 '%H:%M:%S',
299 299 '%I:%M:%S%p',
300 300 '%H:%M',
301 301 '%I:%M%p',
302 302 )
303 303
304 304 extendeddateformats = defaultdateformats + (
305 305 "%Y",
306 306 "%Y-%m",
307 307 "%b",
308 308 "%b %Y",
309 309 )
310 310
311 311 def cachefunc(func):
312 312 '''cache the result of function calls'''
313 313 # XXX doesn't handle keyword args
314 314 if func.func_code.co_argcount == 0:
315 315 cache = []
316 316 def f():
317 317 if len(cache) == 0:
318 318 cache.append(func())
319 319 return cache[0]
320 320 return f
321 321 cache = {}
322 322 if func.func_code.co_argcount == 1:
323 323 # we gain a small amount of time because
324 324 # we don't need to pack/unpack the list
325 325 def f(arg):
326 326 if arg not in cache:
327 327 cache[arg] = func(arg)
328 328 return cache[arg]
329 329 else:
330 330 def f(*args):
331 331 if args not in cache:
332 332 cache[args] = func(*args)
333 333 return cache[args]
334 334
335 335 return f
336 336
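# Minimal usage sketch (the fib function below is hypothetical, for
# illustration only):
#
#   >>> def fib(n):
#   ...     return n if n < 2 else fib(n - 1) + fib(n - 2)
#   >>> cachedfib = cachefunc(fib)
#   >>> cachedfib(10)
#   55
#
# Results are cached forever; see lrucachefunc below for an unbounded
# argument space.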
337 try:
338 collections.deque.remove
339 deque = collections.deque
340 except AttributeError:
341 # python 2.4 lacks deque.remove
342 class deque(collections.deque):
343 def remove(self, val):
344 for i, v in enumerate(self):
345 if v == val:
346 del self[i]
347 break
337 deque = collections.deque
348 338
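# collections.deque.remove() has been available since Python 2.5, so the
# stock deque can now be used directly. Illustrative sketch:
#
#   >>> d = deque(['a', 'b', 'c'])
#   >>> d.remove('b')
#   >>> list(d)
#   ['a', 'c']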
349 339 class sortdict(dict):
350 340 '''a simple sorted dictionary'''
351 341 def __init__(self, data=None):
352 342 self._list = []
353 343 if data:
354 344 self.update(data)
355 345 def copy(self):
356 346 return sortdict(self)
357 347 def __setitem__(self, key, val):
358 348 if key in self:
359 349 self._list.remove(key)
360 350 self._list.append(key)
361 351 dict.__setitem__(self, key, val)
362 352 def __iter__(self):
363 353 return self._list.__iter__()
364 354 def update(self, src):
365 355 if isinstance(src, dict):
366 356 src = src.iteritems()
367 357 for k, v in src:
368 358 self[k] = v
369 359 def clear(self):
370 360 dict.clear(self)
371 361 self._list = []
372 362 def items(self):
373 363 return [(k, self[k]) for k in self._list]
374 364 def __delitem__(self, key):
375 365 dict.__delitem__(self, key)
376 366 self._list.remove(key)
377 367 def pop(self, key, *args, **kwargs):
378 368 dict.pop(self, key, *args, **kwargs)
379 369 try:
380 370 self._list.remove(key)
381 371 except ValueError:
382 372 pass
383 373 def keys(self):
384 374 return self._list
385 375 def iterkeys(self):
386 376 return self._list.__iter__()
387 377 def iteritems(self):
388 378 for k in self._list:
389 379 yield k, self[k]
390 380 def insert(self, index, key, val):
391 381 self._list.insert(index, key)
392 382 dict.__setitem__(self, key, val)
393 383
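# Usage sketch: keys iterate in insertion order, and re-setting a key
# moves it to the end (see __setitem__ above).
#
#   >>> d = sortdict([('a', 1), ('b', 2)])
#   >>> d['a'] = 3
#   >>> d.keys()
#   ['b', 'a']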
394 384 class lrucachedict(object):
395 385 '''cache most recent gets from or sets to this dictionary'''
396 386 def __init__(self, maxsize):
397 387 self._cache = {}
398 388 self._maxsize = maxsize
399 389 self._order = deque()
400 390
401 391 def __getitem__(self, key):
402 392 value = self._cache[key]
403 393 self._order.remove(key)
404 394 self._order.append(key)
405 395 return value
406 396
407 397 def __setitem__(self, key, value):
408 398 if key not in self._cache:
409 399 if len(self._cache) >= self._maxsize:
410 400 del self._cache[self._order.popleft()]
411 401 else:
412 402 self._order.remove(key)
413 403 self._cache[key] = value
414 404 self._order.append(key)
415 405
416 406 def __contains__(self, key):
417 407 return key in self._cache
418 408
419 409 def clear(self):
420 410 self._cache.clear()
421 411 self._order = deque()
422 412
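# Usage sketch: once maxsize is exceeded, the least recently used key is
# evicted (both gets and sets refresh recency).
#
#   >>> c = lrucachedict(2)
#   >>> c['a'] = 1
#   >>> c['b'] = 2
#   >>> c['c'] = 3      # evicts 'a'
#   >>> 'a' in c
#   False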
423 413 def lrucachefunc(func):
424 414 '''cache most recent results of function calls'''
425 415 cache = {}
426 416 order = deque()
427 417 if func.func_code.co_argcount == 1:
428 418 def f(arg):
429 419 if arg not in cache:
430 420 if len(cache) > 20:
431 421 del cache[order.popleft()]
432 422 cache[arg] = func(arg)
433 423 else:
434 424 order.remove(arg)
435 425 order.append(arg)
436 426 return cache[arg]
437 427 else:
438 428 def f(*args):
439 429 if args not in cache:
440 430 if len(cache) > 20:
441 431 del cache[order.popleft()]
442 432 cache[args] = func(*args)
443 433 else:
444 434 order.remove(args)
445 435 order.append(args)
446 436 return cache[args]
447 437
448 438 return f
449 439
450 440 class propertycache(object):
451 441 def __init__(self, func):
452 442 self.func = func
453 443 self.name = func.__name__
454 444 def __get__(self, obj, type=None):
455 445 result = self.func(obj)
456 446 self.cachevalue(obj, result)
457 447 return result
458 448
459 449 def cachevalue(self, obj, value):
460 450 # __dict__ assignment required to bypass __setattr__ (eg: repoview)
461 451 obj.__dict__[self.name] = value
462 452
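# Usage sketch (the repoinfo class is hypothetical): the decorated method
# runs once per instance; its result is stored in the instance __dict__,
# which shadows the non-data descriptor on later reads.
#
#   class repoinfo(object):
#       @propertycache
#       def expensive(self):
#           return 42        # computed only on first access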
463 453 def pipefilter(s, cmd):
464 454 '''filter string S through command CMD, returning its output'''
465 455 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
466 456 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
467 457 pout, perr = p.communicate(s)
468 458 return pout
469 459
470 460 def tempfilter(s, cmd):
471 461 '''filter string S through a pair of temporary files with CMD.
472 462 CMD is used as a template to create the real command to be run,
473 463 with the strings INFILE and OUTFILE replaced by the real names of
474 464 the temporary files generated.'''
475 465 inname, outname = None, None
476 466 try:
477 467 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
478 468 fp = os.fdopen(infd, 'wb')
479 469 fp.write(s)
480 470 fp.close()
481 471 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
482 472 os.close(outfd)
483 473 cmd = cmd.replace('INFILE', inname)
484 474 cmd = cmd.replace('OUTFILE', outname)
485 475 code = os.system(cmd)
486 476 if sys.platform == 'OpenVMS' and code & 1:
487 477 code = 0
488 478 if code:
489 479 raise Abort(_("command '%s' failed: %s") %
490 480 (cmd, explainexit(code)))
491 481 fp = open(outname, 'rb')
492 482 r = fp.read()
493 483 fp.close()
494 484 return r
495 485 finally:
496 486 try:
497 487 if inname:
498 488 os.unlink(inname)
499 489 except OSError:
500 490 pass
501 491 try:
502 492 if outname:
503 493 os.unlink(outname)
504 494 except OSError:
505 495 pass
506 496
507 497 filtertable = {
508 498 'tempfile:': tempfilter,
509 499 'pipe:': pipefilter,
510 500 }
511 501
512 502 def filter(s, cmd):
513 503 "filter a string through a command that transforms its input to its output"
514 504 for name, fn in filtertable.iteritems():
515 505 if cmd.startswith(name):
516 506 return fn(s, cmd[len(name):].lstrip())
517 507 return pipefilter(s, cmd)
518 508
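# Usage sketch (assumes a POSIX 'sort' command in PATH): a 'tempfile:' or
# 'pipe:' prefix selects the corresponding entry from filtertable; any
# other command falls through to pipefilter.
#
#   filter('b\na\n', 'pipe:sort')                        # -> 'a\nb\n'
#   filter('b\na\n', 'tempfile:sort INFILE > OUTFILE')   # -> 'a\nb\n'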
519 509 def binary(s):
520 510 """return true if a string is binary data"""
521 511 return bool(s and '\0' in s)
522 512
523 513 def increasingchunks(source, min=1024, max=65536):
524 514 '''return no less than min bytes per chunk while data remains,
525 515 doubling min after each chunk until it reaches max'''
526 516 def log2(x):
527 517 if not x:
528 518 return 0
529 519 i = 0
530 520 while x:
531 521 x >>= 1
532 522 i += 1
533 523 return i - 1
534 524
535 525 buf = []
536 526 blen = 0
537 527 for chunk in source:
538 528 buf.append(chunk)
539 529 blen += len(chunk)
540 530 if blen >= min:
541 531 if min < max:
542 532 min = min << 1
543 533 nmin = 1 << log2(blen)
544 534 if nmin > min:
545 535 min = nmin
546 536 if min > max:
547 537 min = max
548 538 yield ''.join(buf)
549 539 blen = 0
550 540 buf = []
551 541 if buf:
552 542 yield ''.join(buf)
553 543
554 544 Abort = error.Abort
555 545
556 546 def always(fn):
557 547 return True
558 548
559 549 def never(fn):
560 550 return False
561 551
562 552 def nogc(func):
563 553 """disable garbage collector
564 554
565 555 Python's garbage collector triggers a GC each time a certain number of
566 556 container objects (the number being defined by gc.get_threshold()) are
567 557 allocated even when marked not to be tracked by the collector. Tracking has
568 558 no effect on when GCs are triggered, only on what objects the GC looks
569 559 into. As a workaround, disable GC while building complex (huge)
570 560 containers.
571 561
572 562 This garbage collector issue has been fixed in 2.7.
573 563 """
574 564 def wrapper(*args, **kwargs):
575 565 gcenabled = gc.isenabled()
576 566 gc.disable()
577 567 try:
578 568 return func(*args, **kwargs)
579 569 finally:
580 570 if gcenabled:
581 571 gc.enable()
582 572 return wrapper
583 573
584 574 def pathto(root, n1, n2):
585 575 '''return the relative path from one place to another.
586 576 root should use os.sep to separate directories
587 577 n1 should use os.sep to separate directories
588 578 n2 should use "/" to separate directories
589 579 returns an os.sep-separated path.
590 580
591 581 If n1 is a relative path, it's assumed it's
592 582 relative to root.
593 583 n2 should always be relative to root.
594 584 '''
595 585 if not n1:
596 586 return localpath(n2)
597 587 if os.path.isabs(n1):
598 588 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
599 589 return os.path.join(root, localpath(n2))
600 590 n2 = '/'.join((pconvert(root), n2))
601 591 a, b = splitpath(n1), n2.split('/')
602 592 a.reverse()
603 593 b.reverse()
604 594 while a and b and a[-1] == b[-1]:
605 595 a.pop()
606 596 b.pop()
607 597 b.reverse()
608 598 return os.sep.join((['..'] * len(a)) + b) or '.'
609 599
610 600 def mainfrozen():
611 601 """return True if we are a frozen executable.
612 602
613 603 The code supports py2exe (most common, Windows only) and tools/freeze
614 604 (portable, not much used).
615 605 """
616 606 return (safehasattr(sys, "frozen") or # new py2exe
617 607 safehasattr(sys, "importers") or # old py2exe
618 608 imp.is_frozen("__main__")) # tools/freeze
619 609
620 610 # the location of data files matching the source code
621 611 if mainfrozen():
622 612 # executable version (py2exe) doesn't support __file__
623 613 datapath = os.path.dirname(sys.executable)
624 614 else:
625 615 datapath = os.path.dirname(__file__)
626 616
627 617 i18n.setdatapath(datapath)
628 618
629 619 _hgexecutable = None
630 620
631 621 def hgexecutable():
632 622 """return location of the 'hg' executable.
633 623
634 624 Defaults to $HG or 'hg' in the search path.
635 625 """
636 626 if _hgexecutable is None:
637 627 hg = os.environ.get('HG')
638 628 mainmod = sys.modules['__main__']
639 629 if hg:
640 630 _sethgexecutable(hg)
641 631 elif mainfrozen():
642 632 _sethgexecutable(sys.executable)
643 633 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
644 634 _sethgexecutable(mainmod.__file__)
645 635 else:
646 636 exe = findexe('hg') or os.path.basename(sys.argv[0])
647 637 _sethgexecutable(exe)
648 638 return _hgexecutable
649 639
650 640 def _sethgexecutable(path):
651 641 """set location of the 'hg' executable"""
652 642 global _hgexecutable
653 643 _hgexecutable = path
654 644
655 645 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
656 646 '''enhanced shell command execution.
657 647 run with environment maybe modified, maybe in different dir.
658 648
659 649 if command fails and onerr is None, return status, else raise onerr
660 650 object as exception.
661 651
662 652 if out is specified, it is assumed to be a file-like object that has a
663 653 write() method. stdout and stderr will be redirected to out.'''
664 654 try:
665 655 sys.stdout.flush()
666 656 except Exception:
667 657 pass
668 658 def py2shell(val):
669 659 'convert python object into string that is useful to shell'
670 660 if val is None or val is False:
671 661 return '0'
672 662 if val is True:
673 663 return '1'
674 664 return str(val)
675 665 origcmd = cmd
676 666 cmd = quotecommand(cmd)
677 667 if sys.platform == 'plan9' and (sys.version_info[0] == 2
678 668 and sys.version_info[1] < 7):
679 669 # subprocess kludge to work around issues in half-baked Python
680 670 # ports, notably bichued/python:
681 671 if not cwd is None:
682 672 os.chdir(cwd)
683 673 rc = os.system(cmd)
684 674 else:
685 675 env = dict(os.environ)
686 676 env.update((k, py2shell(v)) for k, v in environ.iteritems())
687 677 env['HG'] = hgexecutable()
688 678 if out is None or out == sys.__stdout__:
689 679 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
690 680 env=env, cwd=cwd)
691 681 else:
692 682 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
693 683 env=env, cwd=cwd, stdout=subprocess.PIPE,
694 684 stderr=subprocess.STDOUT)
695 685 while True:
696 686 line = proc.stdout.readline()
697 687 if not line:
698 688 break
699 689 out.write(line)
700 690 proc.wait()
701 691 rc = proc.returncode
702 692 if sys.platform == 'OpenVMS' and rc & 1:
703 693 rc = 0
704 694 if rc and onerr:
705 695 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
706 696 explainexit(rc)[0])
707 697 if errprefix:
708 698 errmsg = '%s: %s' % (errprefix, errmsg)
709 699 raise onerr(errmsg)
710 700 return rc
711 701
712 702 def checksignature(func):
713 703 '''wrap a function with code to check for calling errors'''
714 704 def check(*args, **kwargs):
715 705 try:
716 706 return func(*args, **kwargs)
717 707 except TypeError:
718 708 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
719 709 raise error.SignatureError
720 710 raise
721 711
722 712 return check
723 713
724 714 def copyfile(src, dest, hardlink=False):
725 715 "copy a file, preserving mode and atime/mtime"
726 716 if os.path.lexists(dest):
727 717 unlink(dest)
728 718 # hardlinks are problematic on CIFS, quietly ignore this flag
729 719 # until we find a way to work around it cleanly (issue4546)
730 720 if False and hardlink:
731 721 try:
732 722 oslink(src, dest)
733 723 return
734 724 except (IOError, OSError):
735 725 pass # fall back to normal copy
736 726 if os.path.islink(src):
737 727 os.symlink(os.readlink(src), dest)
738 728 else:
739 729 try:
740 730 shutil.copyfile(src, dest)
741 731 shutil.copymode(src, dest)
742 732 except shutil.Error, inst:
743 733 raise Abort(str(inst))
744 734
745 735 def copyfiles(src, dst, hardlink=None, progress=lambda t, pos: None):
746 736 """Copy a directory tree using hardlinks if possible."""
747 737 num = 0
748 738
749 739 if hardlink is None:
750 740 hardlink = (os.stat(src).st_dev ==
751 741 os.stat(os.path.dirname(dst)).st_dev)
752 742 if hardlink:
753 743 topic = _('linking')
754 744 else:
755 745 topic = _('copying')
756 746
757 747 if os.path.isdir(src):
758 748 os.mkdir(dst)
759 749 for name, kind in osutil.listdir(src):
760 750 srcname = os.path.join(src, name)
761 751 dstname = os.path.join(dst, name)
762 752 def nprog(t, pos):
763 753 if pos is not None:
764 754 return progress(t, pos + num)
765 755 hardlink, n = copyfiles(srcname, dstname, hardlink, progress=nprog)
766 756 num += n
767 757 else:
768 758 if hardlink:
769 759 try:
770 760 oslink(src, dst)
771 761 except (IOError, OSError):
772 762 hardlink = False
773 763 shutil.copy(src, dst)
774 764 else:
775 765 shutil.copy(src, dst)
776 766 num += 1
777 767 progress(topic, num)
778 768 progress(topic, None)
779 769
780 770 return hardlink, num
781 771
782 772 _winreservednames = '''con prn aux nul
783 773 com1 com2 com3 com4 com5 com6 com7 com8 com9
784 774 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
785 775 _winreservedchars = ':*?"<>|'
786 776 def checkwinfilename(path):
787 777 r'''Check that the base-relative path is a valid filename on Windows.
788 778 Returns None if the path is ok, or a UI string describing the problem.
789 779
790 780 >>> checkwinfilename("just/a/normal/path")
791 781 >>> checkwinfilename("foo/bar/con.xml")
792 782 "filename contains 'con', which is reserved on Windows"
793 783 >>> checkwinfilename("foo/con.xml/bar")
794 784 "filename contains 'con', which is reserved on Windows"
795 785 >>> checkwinfilename("foo/bar/xml.con")
796 786 >>> checkwinfilename("foo/bar/AUX/bla.txt")
797 787 "filename contains 'AUX', which is reserved on Windows"
798 788 >>> checkwinfilename("foo/bar/bla:.txt")
799 789 "filename contains ':', which is reserved on Windows"
800 790 >>> checkwinfilename("foo/bar/b\07la.txt")
801 791 "filename contains '\\x07', which is invalid on Windows"
802 792 >>> checkwinfilename("foo/bar/bla ")
803 793 "filename ends with ' ', which is not allowed on Windows"
804 794 >>> checkwinfilename("../bar")
805 795 >>> checkwinfilename("foo\\")
806 796 "filename ends with '\\', which is invalid on Windows"
807 797 >>> checkwinfilename("foo\\/bar")
808 798 "directory name ends with '\\', which is invalid on Windows"
809 799 '''
810 800 if path.endswith('\\'):
811 801 return _("filename ends with '\\', which is invalid on Windows")
812 802 if '\\/' in path:
813 803 return _("directory name ends with '\\', which is invalid on Windows")
814 804 for n in path.replace('\\', '/').split('/'):
815 805 if not n:
816 806 continue
817 807 for c in n:
818 808 if c in _winreservedchars:
819 809 return _("filename contains '%s', which is reserved "
820 810 "on Windows") % c
821 811 if ord(c) <= 31:
822 812 return _("filename contains %r, which is invalid "
823 813 "on Windows") % c
824 814 base = n.split('.')[0]
825 815 if base and base.lower() in _winreservednames:
826 816 return _("filename contains '%s', which is reserved "
827 817 "on Windows") % base
828 818 t = n[-1]
829 819 if t in '. ' and n not in '..':
830 820 return _("filename ends with '%s', which is not allowed "
831 821 "on Windows") % t
832 822
833 823 if os.name == 'nt':
834 824 checkosfilename = checkwinfilename
835 825 else:
836 826 checkosfilename = platform.checkosfilename
837 827
838 828 def makelock(info, pathname):
839 829 try:
840 830 return os.symlink(info, pathname)
841 831 except OSError, why:
842 832 if why.errno == errno.EEXIST:
843 833 raise
844 834 except AttributeError: # no symlink in os
845 835 pass
846 836
847 837 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
848 838 os.write(ld, info)
849 839 os.close(ld)
850 840
851 841 def readlock(pathname):
852 842 try:
853 843 return os.readlink(pathname)
854 844 except OSError, why:
855 845 if why.errno not in (errno.EINVAL, errno.ENOSYS):
856 846 raise
857 847 except AttributeError: # no symlink in os
858 848 pass
859 849 fp = posixfile(pathname)
860 850 r = fp.read()
861 851 fp.close()
862 852 return r
863 853
864 854 def fstat(fp):
865 855 '''stat file object that may not have fileno method.'''
866 856 try:
867 857 return os.fstat(fp.fileno())
868 858 except AttributeError:
869 859 return os.stat(fp.name)
870 860
871 861 # File system features
872 862
873 863 def checkcase(path):
874 864 """
875 865 Return true if the given path is on a case-sensitive filesystem
876 866
877 867 Requires a path (like /foo/.hg) ending with a foldable final
878 868 directory component.
879 869 """
880 870 s1 = os.lstat(path)
881 871 d, b = os.path.split(path)
882 872 b2 = b.upper()
883 873 if b == b2:
884 874 b2 = b.lower()
885 875 if b == b2:
886 876 return True # no evidence against case sensitivity
887 877 p2 = os.path.join(d, b2)
888 878 try:
889 879 s2 = os.lstat(p2)
890 880 if s2 == s1:
891 881 return False
892 882 return True
893 883 except OSError:
894 884 return True
895 885
896 886 try:
897 887 import re2
898 888 _re2 = None
899 889 except ImportError:
900 890 _re2 = False
901 891
902 892 class _re(object):
903 893 def _checkre2(self):
904 894 global _re2
905 895 try:
906 896 # check if match works, see issue3964
907 897 _re2 = bool(re2.match(r'\[([^\[]+)\]', '[ui]'))
908 898 except ImportError:
909 899 _re2 = False
910 900
911 901 def compile(self, pat, flags=0):
912 902 '''Compile a regular expression, using re2 if possible
913 903
914 904 For best performance, use only re2-compatible regexp features. The
915 905 only flags from the re module that are re2-compatible are
916 906 IGNORECASE and MULTILINE.'''
917 907 if _re2 is None:
918 908 self._checkre2()
919 909 if _re2 and (flags & ~(remod.IGNORECASE | remod.MULTILINE)) == 0:
920 910 if flags & remod.IGNORECASE:
921 911 pat = '(?i)' + pat
922 912 if flags & remod.MULTILINE:
923 913 pat = '(?m)' + pat
924 914 try:
925 915 return re2.compile(pat)
926 916 except re2.error:
927 917 pass
928 918 return remod.compile(pat, flags)
929 919
930 920 @propertycache
931 921 def escape(self):
932 922 '''Return the version of escape corresponding to self.compile.
933 923
934 924 This is imperfect because whether re2 or re is used for a particular
935 925 function depends on the flags, etc, but it's the best we can do.
936 926 '''
937 927 global _re2
938 928 if _re2 is None:
939 929 self._checkre2()
940 930 if _re2:
941 931 return re2.escape
942 932 else:
943 933 return remod.escape
944 934
945 935 re = _re()
946 936
947 937 _fspathcache = {}
948 938 def fspath(name, root):
949 939 '''Get name in the case stored in the filesystem
950 940
951 941 The name should be relative to root, and be normcase-ed for efficiency.
952 942
953 943 Note that this function is unnecessary, and should not be
954 944 called, for case-sensitive filesystems (simply because it's expensive).
955 945
956 946 The root should be normcase-ed, too.
957 947 '''
958 948 def _makefspathcacheentry(dir):
959 949 return dict((normcase(n), n) for n in os.listdir(dir))
960 950
961 951 seps = os.sep
962 952 if os.altsep:
963 953 seps = seps + os.altsep
964 954 # Protect backslashes. This gets silly very quickly.
965 955 seps.replace('\\','\\\\')
966 956 pattern = remod.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
967 957 dir = os.path.normpath(root)
968 958 result = []
969 959 for part, sep in pattern.findall(name):
970 960 if sep:
971 961 result.append(sep)
972 962 continue
973 963
974 964 if dir not in _fspathcache:
975 965 _fspathcache[dir] = _makefspathcacheentry(dir)
976 966 contents = _fspathcache[dir]
977 967
978 968 found = contents.get(part)
979 969 if not found:
980 970 # retry "once per directory" per "dirstate.walk" which
981 971 # may take place for each patch of "hg qpush", for example
982 972 _fspathcache[dir] = contents = _makefspathcacheentry(dir)
983 973 found = contents.get(part)
984 974
985 975 result.append(found or part)
986 976 dir = os.path.join(dir, part)
987 977
988 978 return ''.join(result)
989 979
990 980 def checknlink(testfile):
991 981 '''check whether hardlink count reporting works properly'''
992 982
993 983 # testfile may be open, so we need a separate file for checking to
994 984 # work around issue2543 (or testfile may get lost on Samba shares)
995 985 f1 = testfile + ".hgtmp1"
996 986 if os.path.lexists(f1):
997 987 return False
998 988 try:
999 989 posixfile(f1, 'w').close()
1000 990 except IOError:
1001 991 return False
1002 992
1003 993 f2 = testfile + ".hgtmp2"
1004 994 fd = None
1005 995 try:
1006 996 oslink(f1, f2)
1007 997 # nlinks() may behave differently for files on Windows shares if
1008 998 # the file is open.
1009 999 fd = posixfile(f2)
1010 1000 return nlinks(f2) > 1
1011 1001 except OSError:
1012 1002 return False
1013 1003 finally:
1014 1004 if fd is not None:
1015 1005 fd.close()
1016 1006 for f in (f1, f2):
1017 1007 try:
1018 1008 os.unlink(f)
1019 1009 except OSError:
1020 1010 pass
1021 1011
1022 1012 def endswithsep(path):
1023 1013 '''Check path ends with os.sep or os.altsep.'''
1024 1014 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1025 1015
1026 1016 def splitpath(path):
1027 1017 '''Split path by os.sep.
1028 1018 Note that this function does not use os.altsep because it is
1029 1019 meant as a simple alternative to "xxx.split(os.sep)".
1030 1020 It is recommended to use os.path.normpath() before using this
1031 1021 function if needed.'''
1032 1022 return path.split(os.sep)
1033 1023
1034 1024 def gui():
1035 1025 '''Are we running in a GUI?'''
1036 1026 if sys.platform == 'darwin':
1037 1027 if 'SSH_CONNECTION' in os.environ:
1038 1028 # handle SSH access to a box where the user is logged in
1039 1029 return False
1040 1030 elif getattr(osutil, 'isgui', None):
1041 1031 # check if a CoreGraphics session is available
1042 1032 return osutil.isgui()
1043 1033 else:
1044 1034 # pure build; use a safe default
1045 1035 return True
1046 1036 else:
1047 1037 return os.name == "nt" or os.environ.get("DISPLAY")
1048 1038
1049 1039 def mktempcopy(name, emptyok=False, createmode=None):
1050 1040 """Create a temporary file with the same contents from name
1051 1041
1052 1042 The permission bits are copied from the original file.
1053 1043
1054 1044 If the temporary file is going to be truncated immediately, you
1055 1045 can use emptyok=True as an optimization.
1056 1046
1057 1047 Returns the name of the temporary file.
1058 1048 """
1059 1049 d, fn = os.path.split(name)
1060 1050 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1061 1051 os.close(fd)
1062 1052 # Temporary files are created with mode 0600, which is usually not
1063 1053 # what we want. If the original file already exists, just copy
1064 1054 # its mode. Otherwise, manually obey umask.
1065 1055 copymode(name, temp, createmode)
1066 1056 if emptyok:
1067 1057 return temp
1068 1058 try:
1069 1059 try:
1070 1060 ifp = posixfile(name, "rb")
1071 1061 except IOError, inst:
1072 1062 if inst.errno == errno.ENOENT:
1073 1063 return temp
1074 1064 if not getattr(inst, 'filename', None):
1075 1065 inst.filename = name
1076 1066 raise
1077 1067 ofp = posixfile(temp, "wb")
1078 1068 for chunk in filechunkiter(ifp):
1079 1069 ofp.write(chunk)
1080 1070 ifp.close()
1081 1071 ofp.close()
1082 1072 except: # re-raises
1083 1073 try: os.unlink(temp)
1084 1074 except OSError: pass
1085 1075 raise
1086 1076 return temp
1087 1077
1088 1078 class atomictempfile(object):
1089 1079 '''writable file object that atomically updates a file
1090 1080
1091 1081 All writes will go to a temporary copy of the original file. Call
1092 1082 close() when you are done writing, and atomictempfile will rename
1093 1083 the temporary copy to the original name, making the changes
1094 1084 visible. If the object is destroyed without being closed, all your
1095 1085 writes are discarded.
1096 1086 '''
1097 1087 def __init__(self, name, mode='w+b', createmode=None):
1098 1088 self.__name = name # permanent name
1099 1089 self._tempname = mktempcopy(name, emptyok=('w' in mode),
1100 1090 createmode=createmode)
1101 1091 self._fp = posixfile(self._tempname, mode)
1102 1092
1103 1093 # delegated methods
1104 1094 self.write = self._fp.write
1105 1095 self.seek = self._fp.seek
1106 1096 self.tell = self._fp.tell
1107 1097 self.fileno = self._fp.fileno
1108 1098
1109 1099 def close(self):
1110 1100 if not self._fp.closed:
1111 1101 self._fp.close()
1112 1102 rename(self._tempname, localpath(self.__name))
1113 1103
1114 1104 def discard(self):
1115 1105 if not self._fp.closed:
1116 1106 try:
1117 1107 os.unlink(self._tempname)
1118 1108 except OSError:
1119 1109 pass
1120 1110 self._fp.close()
1121 1111
1122 1112 def __del__(self):
1123 1113 if safehasattr(self, '_fp'): # constructor actually did something
1124 1114 self.discard()
1125 1115
1126 1116 def makedirs(name, mode=None, notindexed=False):
1127 1117 """recursive directory creation with parent mode inheritance"""
1128 1118 try:
1129 1119 makedir(name, notindexed)
1130 1120 except OSError, err:
1131 1121 if err.errno == errno.EEXIST:
1132 1122 return
1133 1123 if err.errno != errno.ENOENT or not name:
1134 1124 raise
1135 1125 parent = os.path.dirname(os.path.abspath(name))
1136 1126 if parent == name:
1137 1127 raise
1138 1128 makedirs(parent, mode, notindexed)
1139 1129 makedir(name, notindexed)
1140 1130 if mode is not None:
1141 1131 os.chmod(name, mode)
1142 1132
1143 1133 def ensuredirs(name, mode=None, notindexed=False):
1144 1134 """race-safe recursive directory creation
1145 1135
1146 1136 Newly created directories are marked as "not to be indexed by
1147 1137 the content indexing service", if ``notindexed`` is specified
1148 1138 for "write" mode access.
1149 1139 """
1150 1140 if os.path.isdir(name):
1151 1141 return
1152 1142 parent = os.path.dirname(os.path.abspath(name))
1153 1143 if parent != name:
1154 1144 ensuredirs(parent, mode, notindexed)
1155 1145 try:
1156 1146 makedir(name, notindexed)
1157 1147 except OSError, err:
1158 1148 if err.errno == errno.EEXIST and os.path.isdir(name):
1159 1149 # someone else seems to have won a directory creation race
1160 1150 return
1161 1151 raise
1162 1152 if mode is not None:
1163 1153 os.chmod(name, mode)
1164 1154
1165 1155 def readfile(path):
1166 1156 fp = open(path, 'rb')
1167 1157 try:
1168 1158 return fp.read()
1169 1159 finally:
1170 1160 fp.close()
1171 1161
1172 1162 def writefile(path, text):
1173 1163 fp = open(path, 'wb')
1174 1164 try:
1175 1165 fp.write(text)
1176 1166 finally:
1177 1167 fp.close()
1178 1168
1179 1169 def appendfile(path, text):
1180 1170 fp = open(path, 'ab')
1181 1171 try:
1182 1172 fp.write(text)
1183 1173 finally:
1184 1174 fp.close()
1185 1175
1186 1176 class chunkbuffer(object):
1187 1177 """Allow arbitrary sized chunks of data to be efficiently read from an
1188 1178 iterator over chunks of arbitrary size."""
1189 1179
1190 1180 def __init__(self, in_iter):
1191 1181 """in_iter is the iterator that's iterating over the input chunks.
1192 1182 targetsize is how big a buffer to try to maintain."""
1193 1183 def splitbig(chunks):
1194 1184 for chunk in chunks:
1195 1185 if len(chunk) > 2**20:
1196 1186 pos = 0
1197 1187 while pos < len(chunk):
1198 1188 end = pos + 2 ** 18
1199 1189 yield chunk[pos:end]
1200 1190 pos = end
1201 1191 else:
1202 1192 yield chunk
1203 1193 self.iter = splitbig(in_iter)
1204 1194 self._queue = deque()
1205 1195
1206 1196 def read(self, l=None):
1207 1197 """Read L bytes of data from the iterator of chunks of data.
1208 1198 Returns less than L bytes if the iterator runs dry.
1209 1199
1210 1200 If size parameter is omitted, read everything"""
1211 1201 left = l
1212 1202 buf = []
1213 1203 queue = self._queue
1214 1204 while left is None or left > 0:
1215 1205 # refill the queue
1216 1206 if not queue:
1217 1207 target = 2**18
1218 1208 for chunk in self.iter:
1219 1209 queue.append(chunk)
1220 1210 target -= len(chunk)
1221 1211 if target <= 0:
1222 1212 break
1223 1213 if not queue:
1224 1214 break
1225 1215
1226 1216 chunk = queue.popleft()
1227 1217 if left is not None:
1228 1218 left -= len(chunk)
1229 1219 if left is not None and left < 0:
1230 1220 queue.appendleft(chunk[left:])
1231 1221 buf.append(chunk[:left])
1232 1222 else:
1233 1223 buf.append(chunk)
1234 1224
1235 1225 return ''.join(buf)
1236 1226
1237 1227 def filechunkiter(f, size=65536, limit=None):
1238 1228 """Create a generator that produces the data in the file size
1239 1229 (default 65536) bytes at a time, up to optional limit (default is
1240 1230 to read all data). Chunks may be less than size bytes if the
1241 1231 chunk is the last chunk in the file, or the file is a socket or
1242 1232 some other type of file that sometimes reads less data than is
1243 1233 requested."""
1244 1234 assert size >= 0
1245 1235 assert limit is None or limit >= 0
1246 1236 while True:
1247 1237 if limit is None:
1248 1238 nbytes = size
1249 1239 else:
1250 1240 nbytes = min(limit, size)
1251 1241 s = nbytes and f.read(nbytes)
1252 1242 if not s:
1253 1243 break
1254 1244 if limit:
1255 1245 limit -= len(s)
1256 1246 yield s
1257 1247
1258 1248 def makedate(timestamp=None):
1259 1249 '''Return a unix timestamp (or the current time) as a (unixtime,
1260 1250 offset) tuple based off the local timezone.'''
1261 1251 if timestamp is None:
1262 1252 timestamp = time.time()
1263 1253 if timestamp < 0:
1264 1254 hint = _("check your clock")
1265 1255 raise Abort(_("negative timestamp: %d") % timestamp, hint=hint)
1266 1256 delta = (datetime.datetime.utcfromtimestamp(timestamp) -
1267 1257 datetime.datetime.fromtimestamp(timestamp))
1268 1258 tz = delta.days * 86400 + delta.seconds
1269 1259 return timestamp, tz
1270 1260
1271 1261 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1272 1262 """represent a (unixtime, offset) tuple as a localized time.
1273 1263 unixtime is seconds since the epoch, and offset is the time zone's
1274 1264 number of seconds away from UTC. if timezone is false, do not
1275 1265 append time zone to string."""
1276 1266 t, tz = date or makedate()
1277 1267 if t < 0:
1278 1268 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1279 1269 tz = 0
1280 1270 if "%1" in format or "%2" in format or "%z" in format:
1281 1271 sign = (tz > 0) and "-" or "+"
1282 1272 minutes = abs(tz) // 60
1283 1273 format = format.replace("%z", "%1%2")
1284 1274 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1285 1275 format = format.replace("%2", "%02d" % (minutes % 60))
1286 1276 try:
1287 1277 t = time.gmtime(float(t) - tz)
1288 1278 except ValueError:
1289 1279 # time was out of range
1290 1280 t = time.gmtime(sys.maxint)
1291 1281 s = time.strftime(format, t)
1292 1282 return s
1293 1283
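# Usage sketch: '%1' and '%2' expand to the timezone hour and minute
# offsets, and '%z' to both. For the epoch in UTC:
#
#   >>> datestr((0, 0), '%Y-%m-%d %H:%M %1%2')
#   '1970-01-01 00:00 +0000'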
1294 1284 def shortdate(date=None):
1295 1285 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1296 1286 return datestr(date, format='%Y-%m-%d')
1297 1287
1298 1288 def strdate(string, format, defaults=[]):
1299 1289 """parse a localized time string and return a (unixtime, offset) tuple.
1300 1290 if the string cannot be parsed, ValueError is raised."""
1301 1291 def timezone(string):
1302 1292 tz = string.split()[-1]
1303 1293 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1304 1294 sign = (tz[0] == "+") and 1 or -1
1305 1295 hours = int(tz[1:3])
1306 1296 minutes = int(tz[3:5])
1307 1297 return -sign * (hours * 60 + minutes) * 60
1308 1298 if tz == "GMT" or tz == "UTC":
1309 1299 return 0
1310 1300 return None
1311 1301
1312 1302 # NOTE: unixtime = localunixtime + offset
1313 1303 offset, date = timezone(string), string
1314 1304 if offset is not None:
1315 1305 date = " ".join(string.split()[:-1])
1316 1306
1317 1307 # add missing elements from defaults
1318 1308 usenow = False # default to using biased defaults
1319 1309 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1320 1310 found = [True for p in part if ("%"+p) in format]
1321 1311 if not found:
1322 1312 date += "@" + defaults[part][usenow]
1323 1313 format += "@%" + part[0]
1324 1314 else:
1325 1315 # We've found a specific time element, less specific time
1326 1316 # elements are relative to today
1327 1317 usenow = True
1328 1318
1329 1319 timetuple = time.strptime(date, format)
1330 1320 localunixtime = int(calendar.timegm(timetuple))
1331 1321 if offset is None:
1332 1322 # local timezone
1333 1323 unixtime = int(time.mktime(timetuple))
1334 1324 offset = unixtime - localunixtime
1335 1325 else:
1336 1326 unixtime = localunixtime + offset
1337 1327 return unixtime, offset
1338 1328
1339 1329 def parsedate(date, formats=None, bias={}):
1340 1330 """parse a localized date/time and return a (unixtime, offset) tuple.
1341 1331
1342 1332 The date may be a "unixtime offset" string or in one of the specified
1343 1333 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1344 1334
1345 1335 >>> parsedate(' today ') == parsedate(\
1346 1336 datetime.date.today().strftime('%b %d'))
1347 1337 True
1348 1338 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1349 1339 datetime.timedelta(days=1)\
1350 1340 ).strftime('%b %d'))
1351 1341 True
1352 1342 >>> now, tz = makedate()
1353 1343 >>> strnow, strtz = parsedate('now')
1354 1344 >>> (strnow - now) < 1
1355 1345 True
1356 1346 >>> tz == strtz
1357 1347 True
1358 1348 """
1359 1349 if not date:
1360 1350 return 0, 0
1361 1351 if isinstance(date, tuple) and len(date) == 2:
1362 1352 return date
1363 1353 if not formats:
1364 1354 formats = defaultdateformats
1365 1355 date = date.strip()
1366 1356
1367 1357 if date == 'now' or date == _('now'):
1368 1358 return makedate()
1369 1359 if date == 'today' or date == _('today'):
1370 1360 date = datetime.date.today().strftime('%b %d')
1371 1361 elif date == 'yesterday' or date == _('yesterday'):
1372 1362 date = (datetime.date.today() -
1373 1363 datetime.timedelta(days=1)).strftime('%b %d')
1374 1364
1375 1365 try:
1376 1366 when, offset = map(int, date.split(' '))
1377 1367 except ValueError:
1378 1368 # fill out defaults
1379 1369 now = makedate()
1380 1370 defaults = {}
1381 1371 for part in ("d", "mb", "yY", "HI", "M", "S"):
1382 1372 # this piece is for rounding the specific end of unknowns
1383 1373 b = bias.get(part)
1384 1374 if b is None:
1385 1375 if part[0] in "HMS":
1386 1376 b = "00"
1387 1377 else:
1388 1378 b = "0"
1389 1379
1390 1380 # this piece is for matching the generic end to today's date
1391 1381 n = datestr(now, "%" + part[0])
1392 1382
1393 1383 defaults[part] = (b, n)
1394 1384
1395 1385 for format in formats:
1396 1386 try:
1397 1387 when, offset = strdate(date, format, defaults)
1398 1388 except (ValueError, OverflowError):
1399 1389 pass
1400 1390 else:
1401 1391 break
1402 1392 else:
1403 1393 raise Abort(_('invalid date: %r') % date)
1404 1394 # validate explicit (probably user-specified) date and
1405 1395 # time zone offset. values must fit in signed 32 bits for
1406 1396 # current 32-bit linux runtimes. timezones go from UTC-12
1407 1397 # to UTC+14
1408 1398 if abs(when) > 0x7fffffff:
1409 1399 raise Abort(_('date exceeds 32 bits: %d') % when)
1410 1400 if when < 0:
1411 1401 raise Abort(_('negative date value: %d') % when)
1412 1402 if offset < -50400 or offset > 43200:
1413 1403 raise Abort(_('impossible time zone offset: %d') % offset)
1414 1404 return when, offset
1415 1405
1416 1406 def matchdate(date):
1417 1407 """Return a function that matches a given date match specifier
1418 1408
1419 1409 Formats include:
1420 1410
1421 1411 '{date}' match a given date to the accuracy provided
1422 1412
1423 1413 '<{date}' on or before a given date
1424 1414
1425 1415 '>{date}' on or after a given date
1426 1416
1427 1417 >>> p1 = parsedate("10:29:59")
1428 1418 >>> p2 = parsedate("10:30:00")
1429 1419 >>> p3 = parsedate("10:30:59")
1430 1420 >>> p4 = parsedate("10:31:00")
1431 1421 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1432 1422 >>> f = matchdate("10:30")
1433 1423 >>> f(p1[0])
1434 1424 False
1435 1425 >>> f(p2[0])
1436 1426 True
1437 1427 >>> f(p3[0])
1438 1428 True
1439 1429 >>> f(p4[0])
1440 1430 False
1441 1431 >>> f(p5[0])
1442 1432 False
1443 1433 """
1444 1434
1445 1435 def lower(date):
1446 1436 d = {'mb': "1", 'd': "1"}
1447 1437 return parsedate(date, extendeddateformats, d)[0]
1448 1438
1449 1439 def upper(date):
1450 1440 d = {'mb': "12", 'HI': "23", 'M': "59", 'S': "59"}
1451 1441 for days in ("31", "30", "29"):
1452 1442 try:
1453 1443 d["d"] = days
1454 1444 return parsedate(date, extendeddateformats, d)[0]
1455 1445 except Abort:
1456 1446 pass
1457 1447 d["d"] = "28"
1458 1448 return parsedate(date, extendeddateformats, d)[0]
1459 1449
1460 1450 date = date.strip()
1461 1451
1462 1452 if not date:
1463 1453 raise Abort(_("dates cannot consist entirely of whitespace"))
1464 1454 elif date[0] == "<":
1465 1455 if not date[1:]:
1466 1456 raise Abort(_("invalid day spec, use '<DATE'"))
1467 1457 when = upper(date[1:])
1468 1458 return lambda x: x <= when
1469 1459 elif date[0] == ">":
1470 1460 if not date[1:]:
1471 1461 raise Abort(_("invalid day spec, use '>DATE'"))
1472 1462 when = lower(date[1:])
1473 1463 return lambda x: x >= when
1474 1464 elif date[0] == "-":
1475 1465 try:
1476 1466 days = int(date[1:])
1477 1467 except ValueError:
1478 1468 raise Abort(_("invalid day spec: %s") % date[1:])
1479 1469 if days < 0:
1480 1470 raise Abort(_('%s must be nonnegative (see "hg help dates")')
1481 1471 % date[1:])
1482 1472 when = makedate()[0] - days * 3600 * 24
1483 1473 return lambda x: x >= when
1484 1474 elif " to " in date:
1485 1475 a, b = date.split(" to ")
1486 1476 start, stop = lower(a), upper(b)
1487 1477 return lambda x: x >= start and x <= stop
1488 1478 else:
1489 1479 start, stop = lower(date), upper(date)
1490 1480 return lambda x: x >= start and x <= stop
1491 1481
1492 1482 def shortuser(user):
1493 1483 """Return a short representation of a user name or email address."""
1494 1484 f = user.find('@')
1495 1485 if f >= 0:
1496 1486 user = user[:f]
1497 1487 f = user.find('<')
1498 1488 if f >= 0:
1499 1489 user = user[f + 1:]
1500 1490 f = user.find(' ')
1501 1491 if f >= 0:
1502 1492 user = user[:f]
1503 1493 f = user.find('.')
1504 1494 if f >= 0:
1505 1495 user = user[:f]
1506 1496 return user
1507 1497
1508 1498 def emailuser(user):
1509 1499 """Return the user portion of an email address."""
1510 1500 f = user.find('@')
1511 1501 if f >= 0:
1512 1502 user = user[:f]
1513 1503 f = user.find('<')
1514 1504 if f >= 0:
1515 1505 user = user[f + 1:]
1516 1506 return user
1517 1507
1518 1508 def email(author):
1519 1509 '''get email of author.'''
1520 1510 r = author.find('>')
1521 1511 if r == -1:
1522 1512 r = None
1523 1513 return author[author.find('<') + 1:r]
1524 1514
1525 1515 def ellipsis(text, maxlength=400):
1526 1516 """Trim string to at most maxlength (default: 400) columns in display."""
1527 1517 return encoding.trim(text, maxlength, ellipsis='...')
1528 1518
1529 1519 def unitcountfn(*unittable):
1530 1520 '''return a function that renders a readable count of some quantity'''
1531 1521
1532 1522 def go(count):
1533 1523 for multiplier, divisor, format in unittable:
1534 1524 if count >= divisor * multiplier:
1535 1525 return format % (count / float(divisor))
1536 1526 return unittable[-1][2] % count
1537 1527
1538 1528 return go
1539 1529
1540 1530 bytecount = unitcountfn(
1541 1531 (100, 1 << 30, _('%.0f GB')),
1542 1532 (10, 1 << 30, _('%.1f GB')),
1543 1533 (1, 1 << 30, _('%.2f GB')),
1544 1534 (100, 1 << 20, _('%.0f MB')),
1545 1535 (10, 1 << 20, _('%.1f MB')),
1546 1536 (1, 1 << 20, _('%.2f MB')),
1547 1537 (100, 1 << 10, _('%.0f KB')),
1548 1538 (10, 1 << 10, _('%.1f KB')),
1549 1539 (1, 1 << 10, _('%.2f KB')),
1550 1540 (1, 1, _('%.0f bytes')),
1551 1541 )
1552 1542
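# Usage sketch (assuming untranslated messages): the first matching
# (multiplier, divisor) row determines the precision, so larger magnitudes
# get fewer decimals.
#
#   >>> bytecount(2048)
#   '2.00 KB'
#   >>> bytecount(12345678)
#   '11.8 MB'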
1553 1543 def uirepr(s):
1554 1544 # Avoid double backslash in Windows path repr()
1555 1545 return repr(s).replace('\\\\', '\\')
1556 1546
1557 1547 # delay import of textwrap
1558 1548 def MBTextWrapper(**kwargs):
1559 1549 class tw(textwrap.TextWrapper):
1560 1550 """
1561 1551 Extend TextWrapper for width-awareness.
1562 1552
1563 1553 Neither the number of 'bytes' in any encoding nor the number of
1564 1554 'characters' is appropriate for calculating terminal columns of a string.
1565 1555
1566 1556 The original TextWrapper implementation uses the built-in 'len()' directly,
1567 1557 so overriding is needed to use the width information of each character.
1568 1558
1569 1559 In addition, characters classified as 'ambiguous' width are
1570 1560 treated as wide in East Asian locales, but as narrow elsewhere.
1571 1561
1572 1562 This requires a decision by the user to determine the width of such characters.
1573 1563 """
1574 1564 def __init__(self, **kwargs):
1575 1565 textwrap.TextWrapper.__init__(self, **kwargs)
1576 1566
1577 1567 # for compatibility between 2.4 and 2.6
1578 1568 if getattr(self, 'drop_whitespace', None) is None:
1579 1569 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1580 1570
1581 1571 def _cutdown(self, ucstr, space_left):
1582 1572 l = 0
1583 1573 colwidth = encoding.ucolwidth
1584 1574 for i in xrange(len(ucstr)):
1585 1575 l += colwidth(ucstr[i])
1586 1576 if space_left < l:
1587 1577 return (ucstr[:i], ucstr[i:])
1588 1578 return ucstr, ''
1589 1579
1590 1580 # overriding of base class
1591 1581 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1592 1582 space_left = max(width - cur_len, 1)
1593 1583
1594 1584 if self.break_long_words:
1595 1585 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1596 1586 cur_line.append(cut)
1597 1587 reversed_chunks[-1] = res
1598 1588 elif not cur_line:
1599 1589 cur_line.append(reversed_chunks.pop())
1600 1590
1601 1591 # this overriding code is imported from TextWrapper of python 2.6
1602 1592 # to calculate columns of string by 'encoding.ucolwidth()'
1603 1593 def _wrap_chunks(self, chunks):
1604 1594 colwidth = encoding.ucolwidth
1605 1595
1606 1596 lines = []
1607 1597 if self.width <= 0:
1608 1598 raise ValueError("invalid width %r (must be > 0)" % self.width)
1609 1599
1610 1600 # Arrange in reverse order so items can be efficiently popped
1611 1601 # from a stack of chunks.
1612 1602 chunks.reverse()
1613 1603
1614 1604 while chunks:
1615 1605
1616 1606 # Start the list of chunks that will make up the current line.
1617 1607 # cur_len is just the length of all the chunks in cur_line.
1618 1608 cur_line = []
1619 1609 cur_len = 0
1620 1610
1621 1611 # Figure out which static string will prefix this line.
1622 1612 if lines:
1623 1613 indent = self.subsequent_indent
1624 1614 else:
1625 1615 indent = self.initial_indent
1626 1616
1627 1617 # Maximum width for this line.
1628 1618 width = self.width - len(indent)
1629 1619
1630 1620 # First chunk on line is whitespace -- drop it, unless this
1631 1621 # is the very beginning of the text (i.e. no lines started yet).
1632 1622 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1633 1623 del chunks[-1]
1634 1624
1635 1625 while chunks:
1636 1626 l = colwidth(chunks[-1])
1637 1627
1638 1628 # Can at least squeeze this chunk onto the current line.
1639 1629 if cur_len + l <= width:
1640 1630 cur_line.append(chunks.pop())
1641 1631 cur_len += l
1642 1632
1643 1633 # Nope, this line is full.
1644 1634 else:
1645 1635 break
1646 1636
1647 1637 # The current line is full, and the next chunk is too big to
1648 1638 # fit on *any* line (not just this one).
1649 1639 if chunks and colwidth(chunks[-1]) > width:
1650 1640 self._handle_long_word(chunks, cur_line, cur_len, width)
1651 1641
1652 1642 # If the last chunk on this line is all whitespace, drop it.
1653 1643 if (self.drop_whitespace and
1654 1644 cur_line and cur_line[-1].strip() == ''):
1655 1645 del cur_line[-1]
1656 1646
1657 1647 # Convert current line back to a string and store it in list
1658 1648 # of all lines (return value).
1659 1649 if cur_line:
1660 1650 lines.append(indent + ''.join(cur_line))
1661 1651
1662 1652 return lines
1663 1653
1664 1654 global MBTextWrapper
1665 1655 MBTextWrapper = tw
1666 1656 return tw(**kwargs)
1667 1657
1668 1658 def wrap(line, width, initindent='', hangindent=''):
1669 1659 maxindent = max(len(hangindent), len(initindent))
1670 1660 if width <= maxindent:
1671 1661 # adjust for weird terminal size
1672 1662 width = max(78, maxindent + 1)
1673 1663 line = line.decode(encoding.encoding, encoding.encodingmode)
1674 1664 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1675 1665 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1676 1666 wrapper = MBTextWrapper(width=width,
1677 1667 initial_indent=initindent,
1678 1668 subsequent_indent=hangindent)
1679 1669 return wrapper.fill(line).encode(encoding.encoding)
1680 1670
1681 1671 def iterlines(iterator):
1682 1672 for chunk in iterator:
1683 1673 for line in chunk.splitlines():
1684 1674 yield line
1685 1675
1686 1676 def expandpath(path):
1687 1677 return os.path.expanduser(os.path.expandvars(path))
1688 1678
1689 1679 def hgcmd():
1690 1680 """Return the command used to execute current hg
1691 1681
1692 1682 This is different from hgexecutable() because on Windows we want
1693 1683 to avoid things opening new shell windows like batch files, so we
1694 1684 get either the python call or current executable.
1695 1685 """
1696 1686 if mainfrozen():
1697 1687 return [sys.executable]
1698 1688 return gethgcmd()
1699 1689
1700 1690 def rundetached(args, condfn):
1701 1691 """Execute the argument list in a detached process.
1702 1692
1703 1693 condfn is a callable which is called repeatedly and should return
1704 1694 True once the child process is known to have started successfully.
1705 1695 At this point, the child process PID is returned. If the child
1706 1696 process fails to start or finishes before condfn() evaluates to
1707 1697 True, return -1.
1708 1698 """
1709 1699 # Windows case is easier because the child process is either
1710 1700 # successfully starting and validating the condition or exiting
1711 1701 # on failure. We just poll on its PID. On Unix, if the child
1712 1702 # process fails to start, it will be left in a zombie state until
1713 1703 # the parent wait on it, which we cannot do since we expect a long
1714 1704 # running process on success. Instead we listen for SIGCHLD telling
1715 1705 # us our child process terminated.
1716 1706 terminated = set()
1717 1707 def handler(signum, frame):
1718 1708 terminated.add(os.wait())
1719 1709 prevhandler = None
1720 1710 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1721 1711 if SIGCHLD is not None:
1722 1712 prevhandler = signal.signal(SIGCHLD, handler)
1723 1713 try:
1724 1714 pid = spawndetached(args)
1725 1715 while not condfn():
1726 1716 if ((pid in terminated or not testpid(pid))
1727 1717 and not condfn()):
1728 1718 return -1
1729 1719 time.sleep(0.1)
1730 1720 return pid
1731 1721 finally:
1732 1722 if prevhandler is not None:
1733 1723 signal.signal(signal.SIGCHLD, prevhandler)
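# Illustrative sketch, not part of the original module (the command line and
# file name below are made up): a typical condfn polls for an externally
# visible side effect of the child, e.g. a daemon creating its pid file:
#
#   pid = rundetached(['hg', 'serve', '-d', '--pid-file', 'hg.pid'],
#                     lambda: os.path.exists('hg.pid'))
#   if pid < 0:
#       raise Abort(_('child process failed to start'))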
1734 1724
1735 1725 try:
1736 1726 any, all = any, all
1737 1727 except NameError:
1738 1728 def any(iterable):
1739 1729 for i in iterable:
1740 1730 if i:
1741 1731 return True
1742 1732 return False
1743 1733
1744 1734 def all(iterable):
1745 1735 for i in iterable:
1746 1736 if not i:
1747 1737 return False
1748 1738 return True
1749 1739
1750 1740 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1751 1741 """Return the result of interpolating items in the mapping into string s.
1752 1742
1753 1743 prefix is a single character string, or a two character string with
1754 1744 a backslash as the first character if the prefix needs to be escaped in
1755 1745 a regular expression.
1756 1746
1757 1747 fn is an optional function that will be applied to the replacement text
1758 1748 just before replacement.
1759 1749
1760 1750 escape_prefix is an optional flag that allows a doubled prefix to be
1761 1751 used as an escape for the prefix character itself.
1762 1752 """
1763 1753 fn = fn or (lambda s: s)
1764 1754 patterns = '|'.join(mapping.keys())
1765 1755 if escape_prefix:
1766 1756 patterns += '|' + prefix
1767 1757 if len(prefix) > 1:
1768 1758 prefix_char = prefix[1:]
1769 1759 else:
1770 1760 prefix_char = prefix
1771 1761 mapping[prefix_char] = prefix_char
1772 1762 r = remod.compile(r'%s(%s)' % (prefix, patterns))
1773 1763 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
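# Illustrative sketch, not part of the original module: simple substitution,
# and prefix escaping via a doubled prefix (mapping and strings are made up):
#
#   >>> interpolate('%', {'foo': 'bar'}, 'say %foo')
#   'say bar'
#   >>> interpolate(r'\$', {'foo': 'bar'}, 'say $foo, pay $$5',
#   ...             escape_prefix=True)
#   'say bar, pay $5'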
1774 1764
1775 1765 def getport(port):
1776 1766 """Return the port for a given network service.
1777 1767
1778 1768 If port is an integer, it's returned as is. If it's a string, it's
1779 1769 looked up using socket.getservbyname(). If there's no matching
1780 1770 service, util.Abort is raised.
1781 1771 """
1782 1772 try:
1783 1773 return int(port)
1784 1774 except ValueError:
1785 1775 pass
1786 1776
1787 1777 try:
1788 1778 return socket.getservbyname(port)
1789 1779 except socket.error:
1790 1780 raise Abort(_("no port number associated with service '%s'") % port)
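# Illustrative sketch, not part of the original module: integers and numeric
# strings pass straight through, service names go through getservbyname()
# (so the last result depends on the system's services database):
#
#   >>> getport(8000), getport('8000')
#   (8000, 8000)
#   >>> getport('http')
#   80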
1791 1781
1792 1782 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1793 1783 '0': False, 'no': False, 'false': False, 'off': False,
1794 1784 'never': False}
1795 1785
1796 1786 def parsebool(s):
1797 1787 """Parse s into a boolean.
1798 1788
1799 1789 If s is not a valid boolean, returns None.
1800 1790 """
1801 1791 return _booleans.get(s.lower(), None)
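# Illustrative sketch, not part of the original module: parsebool() maps the
# usual config spellings onto booleans and returns None for anything else:
#
#   >>> parsebool('yes'), parsebool('off'), parsebool('maybe')
#   (True, False, None)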
1802 1792
1803 1793 _hexdig = '0123456789ABCDEFabcdef'
1804 1794 _hextochr = dict((a + b, chr(int(a + b, 16)))
1805 1795 for a in _hexdig for b in _hexdig)
1806 1796
1807 1797 def _urlunquote(s):
1808 1798 """Decode HTTP/HTML % encoding.
1809 1799
1810 1800 >>> _urlunquote('abc%20def')
1811 1801 'abc def'
1812 1802 """
1813 1803 res = s.split('%')
1814 1804 # fastpath
1815 1805 if len(res) == 1:
1816 1806 return s
1817 1807 s = res[0]
1818 1808 for item in res[1:]:
1819 1809 try:
1820 1810 s += _hextochr[item[:2]] + item[2:]
1821 1811 except KeyError:
1822 1812 s += '%' + item
1823 1813 except UnicodeDecodeError:
1824 1814 s += unichr(int(item[:2], 16)) + item[2:]
1825 1815 return s
1826 1816
1827 1817 class url(object):
1828 1818 r"""Reliable URL parser.
1829 1819
1830 1820 This parses URLs and provides attributes for the following
1831 1821 components:
1832 1822
1833 1823 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1834 1824
1835 1825 Missing components are set to None. The only exception is
1836 1826 fragment, which is set to '' if present but empty.
1837 1827
1838 1828 If parsefragment is False, fragment is included in query. If
1839 1829 parsequery is False, query is included in path. If both are
1840 1830 False, both fragment and query are included in path.
1841 1831
1842 1832 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1843 1833
1844 1834 Note that for backward compatibility reasons, bundle URLs do not
1845 1835 take host names. That means 'bundle://../' has a path of '../'.
1846 1836
1847 1837 Examples:
1848 1838
1849 1839 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1850 1840 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1851 1841 >>> url('ssh://[::1]:2200//home/joe/repo')
1852 1842 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1853 1843 >>> url('file:///home/joe/repo')
1854 1844 <url scheme: 'file', path: '/home/joe/repo'>
1855 1845 >>> url('file:///c:/temp/foo/')
1856 1846 <url scheme: 'file', path: 'c:/temp/foo/'>
1857 1847 >>> url('bundle:foo')
1858 1848 <url scheme: 'bundle', path: 'foo'>
1859 1849 >>> url('bundle://../foo')
1860 1850 <url scheme: 'bundle', path: '../foo'>
1861 1851 >>> url(r'c:\foo\bar')
1862 1852 <url path: 'c:\\foo\\bar'>
1863 1853 >>> url(r'\\blah\blah\blah')
1864 1854 <url path: '\\\\blah\\blah\\blah'>
1865 1855 >>> url(r'\\blah\blah\blah#baz')
1866 1856 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1867 1857 >>> url(r'file:///C:\users\me')
1868 1858 <url scheme: 'file', path: 'C:\\users\\me'>
1869 1859
1870 1860 Authentication credentials:
1871 1861
1872 1862 >>> url('ssh://joe:xyz@x/repo')
1873 1863 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1874 1864 >>> url('ssh://joe@x/repo')
1875 1865 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1876 1866
1877 1867 Query strings and fragments:
1878 1868
1879 1869 >>> url('http://host/a?b#c')
1880 1870 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1881 1871 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1882 1872 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1883 1873 """
1884 1874
1885 1875 _safechars = "!~*'()+"
1886 1876 _safepchars = "/!~*'()+:\\"
1887 1877 _matchscheme = remod.compile(r'^[a-zA-Z0-9+.\-]+:').match
1888 1878
1889 1879 def __init__(self, path, parsequery=True, parsefragment=True):
1890 1880 # We slowly chomp away at path until we have only the path left
1891 1881 self.scheme = self.user = self.passwd = self.host = None
1892 1882 self.port = self.path = self.query = self.fragment = None
1893 1883 self._localpath = True
1894 1884 self._hostport = ''
1895 1885 self._origpath = path
1896 1886
1897 1887 if parsefragment and '#' in path:
1898 1888 path, self.fragment = path.split('#', 1)
1899 1889 if not path:
1900 1890 path = None
1901 1891
1902 1892 # special case for Windows drive letters and UNC paths
1903 1893 if hasdriveletter(path) or path.startswith(r'\\'):
1904 1894 self.path = path
1905 1895 return
1906 1896
1907 1897 # For compatibility reasons, we can't handle bundle paths as
1908 1898 # normal URLs
1909 1899 if path.startswith('bundle:'):
1910 1900 self.scheme = 'bundle'
1911 1901 path = path[7:]
1912 1902 if path.startswith('//'):
1913 1903 path = path[2:]
1914 1904 self.path = path
1915 1905 return
1916 1906
1917 1907 if self._matchscheme(path):
1918 1908 parts = path.split(':', 1)
1919 1909 if parts[0]:
1920 1910 self.scheme, path = parts
1921 1911 self._localpath = False
1922 1912
1923 1913 if not path:
1924 1914 path = None
1925 1915 if self._localpath:
1926 1916 self.path = ''
1927 1917 return
1928 1918 else:
1929 1919 if self._localpath:
1930 1920 self.path = path
1931 1921 return
1932 1922
1933 1923 if parsequery and '?' in path:
1934 1924 path, self.query = path.split('?', 1)
1935 1925 if not path:
1936 1926 path = None
1937 1927 if not self.query:
1938 1928 self.query = None
1939 1929
1940 1930 # // is required to specify a host/authority
1941 1931 if path and path.startswith('//'):
1942 1932 parts = path[2:].split('/', 1)
1943 1933 if len(parts) > 1:
1944 1934 self.host, path = parts
1945 1935 else:
1946 1936 self.host = parts[0]
1947 1937 path = None
1948 1938 if not self.host:
1949 1939 self.host = None
1950 1940 # path of file:///d is /d
1951 1941 # path of file:///d:/ is d:/, not /d:/
1952 1942 if path and not hasdriveletter(path):
1953 1943 path = '/' + path
1954 1944
1955 1945 if self.host and '@' in self.host:
1956 1946 self.user, self.host = self.host.rsplit('@', 1)
1957 1947 if ':' in self.user:
1958 1948 self.user, self.passwd = self.user.split(':', 1)
1959 1949 if not self.host:
1960 1950 self.host = None
1961 1951
1962 1952 # Don't split on colons in IPv6 addresses without ports
1963 1953 if (self.host and ':' in self.host and
1964 1954 not (self.host.startswith('[') and self.host.endswith(']'))):
1965 1955 self._hostport = self.host
1966 1956 self.host, self.port = self.host.rsplit(':', 1)
1967 1957 if not self.host:
1968 1958 self.host = None
1969 1959
1970 1960 if (self.host and self.scheme == 'file' and
1971 1961 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1972 1962 raise Abort(_('file:// URLs can only refer to localhost'))
1973 1963
1974 1964 self.path = path
1975 1965
1976 1966 # leave the query string escaped
1977 1967 for a in ('user', 'passwd', 'host', 'port',
1978 1968 'path', 'fragment'):
1979 1969 v = getattr(self, a)
1980 1970 if v is not None:
1981 1971 setattr(self, a, _urlunquote(v))
1982 1972
1983 1973 def __repr__(self):
1984 1974 attrs = []
1985 1975 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1986 1976 'query', 'fragment'):
1987 1977 v = getattr(self, a)
1988 1978 if v is not None:
1989 1979 attrs.append('%s: %r' % (a, v))
1990 1980 return '<url %s>' % ', '.join(attrs)
1991 1981
1992 1982 def __str__(self):
1993 1983 r"""Join the URL's components back into a URL string.
1994 1984
1995 1985 Examples:
1996 1986
1997 1987 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1998 1988 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1999 1989 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
2000 1990 'http://user:pw@host:80/?foo=bar&baz=42'
2001 1991 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
2002 1992 'http://user:pw@host:80/?foo=bar%3dbaz'
2003 1993 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
2004 1994 'ssh://user:pw@[::1]:2200//home/joe#'
2005 1995 >>> str(url('http://localhost:80//'))
2006 1996 'http://localhost:80//'
2007 1997 >>> str(url('http://localhost:80/'))
2008 1998 'http://localhost:80/'
2009 1999 >>> str(url('http://localhost:80'))
2010 2000 'http://localhost:80/'
2011 2001 >>> str(url('bundle:foo'))
2012 2002 'bundle:foo'
2013 2003 >>> str(url('bundle://../foo'))
2014 2004 'bundle:../foo'
2015 2005 >>> str(url('path'))
2016 2006 'path'
2017 2007 >>> str(url('file:///tmp/foo/bar'))
2018 2008 'file:///tmp/foo/bar'
2019 2009 >>> str(url('file:///c:/tmp/foo/bar'))
2020 2010 'file:///c:/tmp/foo/bar'
2021 2011 >>> print url(r'bundle:foo\bar')
2022 2012 bundle:foo\bar
2023 2013 >>> print url(r'file:///D:\data\hg')
2024 2014 file:///D:\data\hg
2025 2015 """
2026 2016 if self._localpath:
2027 2017 s = self.path
2028 2018 if self.scheme == 'bundle':
2029 2019 s = 'bundle:' + s
2030 2020 if self.fragment:
2031 2021 s += '#' + self.fragment
2032 2022 return s
2033 2023
2034 2024 s = self.scheme + ':'
2035 2025 if self.user or self.passwd or self.host:
2036 2026 s += '//'
2037 2027 elif self.scheme and (not self.path or self.path.startswith('/')
2038 2028 or hasdriveletter(self.path)):
2039 2029 s += '//'
2040 2030 if hasdriveletter(self.path):
2041 2031 s += '/'
2042 2032 if self.user:
2043 2033 s += urllib.quote(self.user, safe=self._safechars)
2044 2034 if self.passwd:
2045 2035 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
2046 2036 if self.user or self.passwd:
2047 2037 s += '@'
2048 2038 if self.host:
2049 2039 if not (self.host.startswith('[') and self.host.endswith(']')):
2050 2040 s += urllib.quote(self.host)
2051 2041 else:
2052 2042 s += self.host
2053 2043 if self.port:
2054 2044 s += ':' + urllib.quote(self.port)
2055 2045 if self.host:
2056 2046 s += '/'
2057 2047 if self.path:
2058 2048 # TODO: similar to the query string, we should not unescape the
2059 2049 # path when we store it, the path might contain '%2f' = '/',
2060 2050 # which we should *not* escape.
2061 2051 s += urllib.quote(self.path, safe=self._safepchars)
2062 2052 if self.query:
2063 2053 # we store the query in escaped form.
2064 2054 s += '?' + self.query
2065 2055 if self.fragment is not None:
2066 2056 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
2067 2057 return s
2068 2058
2069 2059 def authinfo(self):
2070 2060 user, passwd = self.user, self.passwd
2071 2061 try:
2072 2062 self.user, self.passwd = None, None
2073 2063 s = str(self)
2074 2064 finally:
2075 2065 self.user, self.passwd = user, passwd
2076 2066 if not self.user:
2077 2067 return (s, None)
2078 2068 # authinfo[1] is passed to urllib2 password manager, and its
2079 2069 # URIs must not contain credentials. The host is passed in the
2080 2070 # URIs list because Python < 2.4.3 uses only that to search for
2081 2071 # a password.
2082 2072 return (s, (None, (s, self.host),
2083 2073 self.user, self.passwd or ''))
2084 2074
2085 2075 def isabs(self):
2086 2076 if self.scheme and self.scheme != 'file':
2087 2077 return True # remote URL
2088 2078 if hasdriveletter(self.path):
2089 2079 return True # absolute for our purposes - can't be joined()
2090 2080 if self.path.startswith(r'\\'):
2091 2081 return True # Windows UNC path
2092 2082 if self.path.startswith('/'):
2093 2083 return True # POSIX-style
2094 2084 return False
2095 2085
2096 2086 def localpath(self):
2097 2087 if self.scheme == 'file' or self.scheme == 'bundle':
2098 2088 path = self.path or '/'
2099 2089 # For Windows, we need to promote hosts containing drive
2100 2090 # letters to paths with drive letters.
2101 2091 if hasdriveletter(self._hostport):
2102 2092 path = self._hostport + '/' + self.path
2103 2093 elif (self.host is not None and self.path
2104 2094 and not hasdriveletter(path)):
2105 2095 path = '/' + path
2106 2096 return path
2107 2097 return self._origpath
2108 2098
2109 2099 def islocal(self):
2110 2100 '''whether localpath will return something that posixfile can open'''
2111 2101 return (not self.scheme or self.scheme == 'file'
2112 2102 or self.scheme == 'bundle')
2113 2103
2114 2104 def hasscheme(path):
2115 2105 return bool(url(path).scheme)
2116 2106
2117 2107 def hasdriveletter(path):
2118 2108 return path and path[1:2] == ':' and path[0:1].isalpha()
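# Illustrative sketch, not part of the original module:
#
#   >>> hasdriveletter('c:\\temp'), hasdriveletter('/tmp')
#   (True, False)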
2119 2109
2120 2110 def urllocalpath(path):
2121 2111 return url(path, parsequery=False, parsefragment=False).localpath()
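# Illustrative sketch, not part of the original module: both file:// URLs and
# plain local paths come back as filesystem paths (paths are made up):
#
#   >>> urllocalpath('file:///tmp/repo')
#   '/tmp/repo'
#   >>> urllocalpath('/tmp/repo')
#   '/tmp/repo'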
2122 2112
2123 2113 def hidepassword(u):
2124 2114 '''hide user credentials in a url string'''
2125 2115 u = url(u)
2126 2116 if u.passwd:
2127 2117 u.passwd = '***'
2128 2118 return str(u)
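# Illustrative sketch, not part of the original module (credentials and host
# are made up):
#
#   >>> hidepassword('http://joe:secret@example.com/repo')
#   'http://joe:***@example.com/repo'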
2129 2119
2130 2120 def removeauth(u):
2131 2121 '''remove all authentication information from a url string'''
2132 2122 u = url(u)
2133 2123 u.user = u.passwd = None
2134 2124 return str(u)
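# Illustrative sketch, not part of the original module (credentials and host
# are made up):
#
#   >>> removeauth('http://joe:secret@example.com/repo')
#   'http://example.com/repo'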
2135 2125
2136 2126 def isatty(fd):
2137 2127 try:
2138 2128 return fd.isatty()
2139 2129 except AttributeError:
2140 2130 return False
2141 2131
2142 2132 timecount = unitcountfn(
2143 2133 (1, 1e3, _('%.0f s')),
2144 2134 (100, 1, _('%.1f s')),
2145 2135 (10, 1, _('%.2f s')),
2146 2136 (1, 1, _('%.3f s')),
2147 2137 (100, 0.001, _('%.1f ms')),
2148 2138 (10, 0.001, _('%.2f ms')),
2149 2139 (1, 0.001, _('%.3f ms')),
2150 2140 (100, 0.000001, _('%.1f us')),
2151 2141 (10, 0.000001, _('%.2f us')),
2152 2142 (1, 0.000001, _('%.3f us')),
2153 2143 (100, 0.000000001, _('%.1f ns')),
2154 2144 (10, 0.000000001, _('%.2f ns')),
2155 2145 (1, 0.000000001, _('%.3f ns')),
2156 2146 )
2157 2147
2158 2148 _timenesting = [0]
2159 2149
2160 2150 def timed(func):
2161 2151 '''Report the execution time of a function call to stderr.
2162 2152
2163 2153 During development, use as a decorator when you need to measure
2164 2154 the cost of a function, e.g. as follows:
2165 2155
2166 2156 @util.timed
2167 2157 def foo(a, b, c):
2168 2158 pass
2169 2159 '''
2170 2160
2171 2161 def wrapper(*args, **kwargs):
2172 2162 start = time.time()
2173 2163 indent = 2
2174 2164 _timenesting[0] += indent
2175 2165 try:
2176 2166 return func(*args, **kwargs)
2177 2167 finally:
2178 2168 elapsed = time.time() - start
2179 2169 _timenesting[0] -= indent
2180 2170 sys.stderr.write('%s%s: %s\n' %
2181 2171 (' ' * _timenesting[0], func.__name__,
2182 2172 timecount(elapsed)))
2183 2173 return wrapper
2184 2174
2185 2175 _sizeunits = (('m', 2**20), ('k', 2**10), ('g', 2**30),
2186 2176 ('kb', 2**10), ('mb', 2**20), ('gb', 2**30), ('b', 1))
2187 2177
2188 2178 def sizetoint(s):
2189 2179 '''Convert a space specifier to a byte count.
2190 2180
2191 2181 >>> sizetoint('30')
2192 2182 30
2193 2183 >>> sizetoint('2.2kb')
2194 2184 2252
2195 2185 >>> sizetoint('6M')
2196 2186 6291456
2197 2187 '''
2198 2188 t = s.strip().lower()
2199 2189 try:
2200 2190 for k, u in _sizeunits:
2201 2191 if t.endswith(k):
2202 2192 return int(float(t[:-len(k)]) * u)
2203 2193 return int(t)
2204 2194 except ValueError:
2205 2195 raise error.ParseError(_("couldn't parse size: %s") % s)
2206 2196
2207 2197 class hooks(object):
2208 2198 '''A collection of hook functions that can be used to extend a
2209 2199 function's behaviour. Hooks are called in lexicographic order,
2210 2200 based on the names of their sources.'''
2211 2201
2212 2202 def __init__(self):
2213 2203 self._hooks = []
2214 2204
2215 2205 def add(self, source, hook):
2216 2206 self._hooks.append((source, hook))
2217 2207
2218 2208 def __call__(self, *args):
2219 2209 self._hooks.sort(key=lambda x: x[0])
2220 2210 results = []
2221 2211 for source, hook in self._hooks:
2222 2212 results.append(hook(*args))
2223 2213 return results
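# Illustrative sketch, not part of the original module: hooks() runs its
# registered callables sorted by source name and collects their results
# (source names and callables are made up):
#
#   >>> h = hooks()
#   >>> h.add('zzz', lambda x: x + 1)
#   >>> h.add('aaa', lambda x: x * 2)
#   >>> h(3)            # 'aaa' sorts before 'zzz'
#   [6, 4]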
2224 2214
2225 2215 def debugstacktrace(msg='stacktrace', skip=0, f=sys.stderr, otherf=sys.stdout):
2226 2216 '''Writes a message to f (stderr) with a nicely formatted stacktrace.
2227 2217 Skips the 'skip' last entries. By default it will flush stdout first.
2228 2218 It can be used everywhere and do intentionally not require an ui object.
2229 2219 Not be used in production code but very convenient while developing.
2230 2220 '''
2231 2221 if otherf:
2232 2222 otherf.flush()
2233 2223 f.write('%s at:\n' % msg)
2234 2224 entries = [('%s:%s' % (fn, ln), func)
2235 2225 for fn, ln, func, _text in traceback.extract_stack()[:-skip - 1]]
2236 2226 if entries:
2237 2227 fnmax = max(len(entry[0]) for entry in entries)
2238 2228 for fnln, func in entries:
2239 2229 f.write(' %-*s in %s\n' % (fnmax, fnln, func))
2240 2230 f.flush()
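# Illustrative sketch, not part of the original module: sprinkle a call like
#
#   debugstacktrace('who calls foo?')
#
# into a suspect code path to print the call chain on stderr, and remove it
# again before committing.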
2241 2231
2242 2232 class dirs(object):
2243 2233 '''a multiset of directory names from a dirstate or manifest'''
2244 2234
2245 2235 def __init__(self, map, skip=None):
2246 2236 self._dirs = {}
2247 2237 addpath = self.addpath
2248 2238 if safehasattr(map, 'iteritems') and skip is not None:
2249 2239 for f, s in map.iteritems():
2250 2240 if s[0] != skip:
2251 2241 addpath(f)
2252 2242 else:
2253 2243 for f in map:
2254 2244 addpath(f)
2255 2245
2256 2246 def addpath(self, path):
2257 2247 dirs = self._dirs
2258 2248 for base in finddirs(path):
2259 2249 if base in dirs:
2260 2250 dirs[base] += 1
2261 2251 return
2262 2252 dirs[base] = 1
2263 2253
2264 2254 def delpath(self, path):
2265 2255 dirs = self._dirs
2266 2256 for base in finddirs(path):
2267 2257 if dirs[base] > 1:
2268 2258 dirs[base] -= 1
2269 2259 return
2270 2260 del dirs[base]
2271 2261
2272 2262 def __iter__(self):
2273 2263 return self._dirs.iterkeys()
2274 2264
2275 2265 def __contains__(self, d):
2276 2266 return d in self._dirs
2277 2267
2278 2268 if safehasattr(parsers, 'dirs'):
2279 2269 dirs = parsers.dirs
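# Illustrative sketch, not part of the original module: the pure Python class
# above (and the C version from parsers, which is expected to behave the same)
# counts every ancestor directory of the given paths, so membership tests are
# cheap (paths are made up):
#
#   >>> d = dirs(['a/b/x', 'a/c/y'])
#   >>> 'a' in d, 'a/b' in d, 'a/b/x' in d
#   (True, True, False)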
2280 2270
2281 2271 def finddirs(path):
2282 2272 pos = path.rfind('/')
2283 2273 while pos != -1:
2284 2274 yield path[:pos]
2285 2275 pos = path.rfind('/', 0, pos)
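# Illustrative sketch, not part of the original module: finddirs() yields the
# ancestors of a slash-separated path, deepest first, and nothing for a
# top-level name:
#
#   >>> list(finddirs('a/b/c'))
#   ['a/b', 'a']
#   >>> list(finddirs('top'))
#   []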
2286 2276
2287 2277 # convenient shortcut
2288 2278 dst = debugstacktrace