util: remove warnings when importing md5 and sha
Sune Foldager
r8295:1ea7e7d9 default
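The hunk below changes only the fallback imports in the md5() and sha1() helpers. For md5(), the old spelling is the likely source of the warning: `import md5` assigns to the name md5 before the `global md5` declaration later in the same function, which CPython 2.x reports as a SyntaxWarning when util.py is byte-compiled. Importing under a different name (`from md5 import md5 as _md5`) binds only `_md5`, so no assignment precedes the global statement; the sha fallback is rewritten the same way for symmetry. A minimal, hypothetical reproduction of the warning-triggering shape, assuming a Python 2.x interpreter (this is not Mercurial code):

    def old_style(s):
        import md5         # binds the name 'md5' in this scope...
        global md5         # ...before this declaration: SyntaxWarning on Python 2.x
        md5 = md5.md5      # rebind the module-level name to the hash constructor
        return md5(s)

The rewritten fallbacks keep the same lazy resolution of the hash constructor; only the way the fallback names are bound changes.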
@@ -1,1483 +1,1481
1 1 # util.py - Mercurial utility functions and platform specific implementations
2 2 #
3 3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 6 #
7 7 # This software may be used and distributed according to the terms of the
8 8 # GNU General Public License version 2, incorporated herein by reference.
9 9
10 10 """Mercurial utility functions and platform specfic implementations.
11 11
12 12 This contains helper routines that are independent of the SCM core and
13 13 hide platform-specific details from the core.
14 14 """
15 15
16 16 from i18n import _
17 17 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
18 18 import os, stat, threading, time, calendar, glob, osutil, random
19 19 import imp
20 20
21 21 # Python compatibility
22 22
23 23 def md5(s):
24 24 try:
25 25 import hashlib
26 26 _md5 = hashlib.md5
27 27 except ImportError:
28 import md5
29 _md5 = md5.md5
28 from md5 import md5 as _md5
30 29 global md5
31 30 md5 = _md5
32 31 return _md5(s)
33 32
34 33 def sha1(s):
35 34 try:
36 35 import hashlib
37 36 _sha1 = hashlib.sha1
38 37 except ImportError:
39 import sha
40 _sha1 = sha.sha
38 from sha import sha as _sha1
41 39 global sha1
42 40 sha1 = _sha1
43 41 return _sha1(s)
44 42
45 43 import subprocess
46 44 closefds = os.name == 'posix'
47 45 def popen2(cmd, mode='t', bufsize=-1):
48 46 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
49 47 close_fds=closefds,
50 48 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
51 49 return p.stdin, p.stdout
52 50 def popen3(cmd, mode='t', bufsize=-1):
53 51 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
54 52 close_fds=closefds,
55 53 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
56 54 stderr=subprocess.PIPE)
57 55 return p.stdin, p.stdout, p.stderr
58 56 def Popen3(cmd, capturestderr=False, bufsize=-1):
59 57 stderr = capturestderr and subprocess.PIPE or None
60 58 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
61 59 close_fds=closefds,
62 60 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
63 61 stderr=stderr)
64 62 p.fromchild = p.stdout
65 63 p.tochild = p.stdin
66 64 p.childerr = p.stderr
67 65 return p
68 66
69 67 def version():
70 68 """Return version information if available."""
71 69 try:
72 70 import __version__
73 71 return __version__.version
74 72 except ImportError:
75 73 return 'unknown'
76 74
77 75 # used by parsedate
78 76 defaultdateformats = (
79 77 '%Y-%m-%d %H:%M:%S',
80 78 '%Y-%m-%d %I:%M:%S%p',
81 79 '%Y-%m-%d %H:%M',
82 80 '%Y-%m-%d %I:%M%p',
83 81 '%Y-%m-%d',
84 82 '%m-%d',
85 83 '%m/%d',
86 84 '%m/%d/%y',
87 85 '%m/%d/%Y',
88 86 '%a %b %d %H:%M:%S %Y',
89 87 '%a %b %d %I:%M:%S%p %Y',
90 88 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
91 89 '%b %d %H:%M:%S %Y',
92 90 '%b %d %I:%M:%S%p %Y',
93 91 '%b %d %H:%M:%S',
94 92 '%b %d %I:%M:%S%p',
95 93 '%b %d %H:%M',
96 94 '%b %d %I:%M%p',
97 95 '%b %d %Y',
98 96 '%b %d',
99 97 '%H:%M:%S',
100 98 '%I:%M:%S%p',
101 99 '%H:%M',
102 100 '%I:%M%p',
103 101 )
104 102
105 103 extendeddateformats = defaultdateformats + (
106 104 "%Y",
107 105 "%Y-%m",
108 106 "%b",
109 107 "%b %Y",
110 108 )
111 109
112 110 def cachefunc(func):
113 111 '''cache the result of function calls'''
114 112 # XXX doesn't handle keywords args
115 113 cache = {}
116 114 if func.func_code.co_argcount == 1:
117 115 # we gain a small amount of time because
118 116 # we don't need to pack/unpack the list
119 117 def f(arg):
120 118 if arg not in cache:
121 119 cache[arg] = func(arg)
122 120 return cache[arg]
123 121 else:
124 122 def f(*args):
125 123 if args not in cache:
126 124 cache[args] = func(*args)
127 125 return cache[args]
128 126
129 127 return f
130 128
131 129 class propertycache(object):
132 130 def __init__(self, func):
133 131 self.func = func
134 132 self.name = func.__name__
135 133 def __get__(self, obj, type=None):
136 134 result = self.func(obj)
137 135 setattr(obj, self.name, result)
138 136 return result
139 137
140 138 def pipefilter(s, cmd):
141 139 '''filter string S through command CMD, returning its output'''
142 140 (pin, pout) = popen2(cmd, 'b')
143 141 def writer():
144 142 try:
145 143 pin.write(s)
146 144 pin.close()
147 145 except IOError, inst:
148 146 if inst.errno != errno.EPIPE:
149 147 raise
150 148
151 149 # we should use select instead on UNIX, but this will work on most
152 150 # systems, including Windows
153 151 w = threading.Thread(target=writer)
154 152 w.start()
155 153 f = pout.read()
156 154 pout.close()
157 155 w.join()
158 156 return f
159 157
160 158 def tempfilter(s, cmd):
161 159 '''filter string S through a pair of temporary files with CMD.
162 160 CMD is used as a template to create the real command to be run,
163 161 with the strings INFILE and OUTFILE replaced by the real names of
164 162 the temporary files generated.'''
165 163 inname, outname = None, None
166 164 try:
167 165 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 166 fp = os.fdopen(infd, 'wb')
169 167 fp.write(s)
170 168 fp.close()
171 169 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 170 os.close(outfd)
173 171 cmd = cmd.replace('INFILE', inname)
174 172 cmd = cmd.replace('OUTFILE', outname)
175 173 code = os.system(cmd)
176 174 if sys.platform == 'OpenVMS' and code & 1:
177 175 code = 0
178 176 if code: raise Abort(_("command '%s' failed: %s") %
179 177 (cmd, explain_exit(code)))
180 178 return open(outname, 'rb').read()
181 179 finally:
182 180 try:
183 181 if inname: os.unlink(inname)
184 182 except: pass
185 183 try:
186 184 if outname: os.unlink(outname)
187 185 except: pass
188 186
189 187 filtertable = {
190 188 'tempfile:': tempfilter,
191 189 'pipe:': pipefilter,
192 190 }
193 191
194 192 def filter(s, cmd):
195 193 "filter a string through a command that transforms its input to its output"
196 194 for name, fn in filtertable.iteritems():
197 195 if cmd.startswith(name):
198 196 return fn(s, cmd[len(name):].lstrip())
199 197 return pipefilter(s, cmd)
200 198
201 199 def binary(s):
202 200 """return true if a string is binary data"""
203 201 return bool(s and '\0' in s)
204 202
205 203 def increasingchunks(source, min=1024, max=65536):
206 204 '''return no less than min bytes per chunk while data remains,
207 205 doubling min after each chunk until it reaches max'''
208 206 def log2(x):
209 207 if not x:
210 208 return 0
211 209 i = 0
212 210 while x:
213 211 x >>= 1
214 212 i += 1
215 213 return i - 1
216 214
217 215 buf = []
218 216 blen = 0
219 217 for chunk in source:
220 218 buf.append(chunk)
221 219 blen += len(chunk)
222 220 if blen >= min:
223 221 if min < max:
224 222 min = min << 1
225 223 nmin = 1 << log2(blen)
226 224 if nmin > min:
227 225 min = nmin
228 226 if min > max:
229 227 min = max
230 228 yield ''.join(buf)
231 229 blen = 0
232 230 buf = []
233 231 if buf:
234 232 yield ''.join(buf)
235 233
236 234 Abort = error.Abort
237 235
238 236 def always(fn): return True
239 237 def never(fn): return False
240 238
241 239 def patkind(name, default):
242 240 """Split a string into an optional pattern kind prefix and the
243 241 actual pattern."""
244 242 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
245 243 if name.startswith(prefix + ':'): return name.split(':', 1)
246 244 return default, name
247 245
248 246 def globre(pat, head='^', tail='$'):
249 247 "convert a glob pattern into a regexp"
250 248 i, n = 0, len(pat)
251 249 res = ''
252 250 group = 0
253 251 def peek(): return i < n and pat[i]
254 252 while i < n:
255 253 c = pat[i]
256 254 i = i+1
257 255 if c == '*':
258 256 if peek() == '*':
259 257 i += 1
260 258 res += '.*'
261 259 else:
262 260 res += '[^/]*'
263 261 elif c == '?':
264 262 res += '.'
265 263 elif c == '[':
266 264 j = i
267 265 if j < n and pat[j] in '!]':
268 266 j += 1
269 267 while j < n and pat[j] != ']':
270 268 j += 1
271 269 if j >= n:
272 270 res += '\\['
273 271 else:
274 272 stuff = pat[i:j].replace('\\','\\\\')
275 273 i = j + 1
276 274 if stuff[0] == '!':
277 275 stuff = '^' + stuff[1:]
278 276 elif stuff[0] == '^':
279 277 stuff = '\\' + stuff
280 278 res = '%s[%s]' % (res, stuff)
281 279 elif c == '{':
282 280 group += 1
283 281 res += '(?:'
284 282 elif c == '}' and group:
285 283 res += ')'
286 284 group -= 1
287 285 elif c == ',' and group:
288 286 res += '|'
289 287 elif c == '\\':
290 288 p = peek()
291 289 if p:
292 290 i += 1
293 291 res += re.escape(p)
294 292 else:
295 293 res += re.escape(c)
296 294 else:
297 295 res += re.escape(c)
298 296 return head + res + tail
299 297
300 298 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
301 299
302 300 def pathto(root, n1, n2):
303 301 '''return the relative path from one place to another.
304 302 root should use os.sep to separate directories
305 303 n1 should use os.sep to separate directories
306 304 n2 should use "/" to separate directories
307 305 returns an os.sep-separated path.
308 306
309 307 If n1 is a relative path, it's assumed it's
310 308 relative to root.
311 309 n2 should always be relative to root.
312 310 '''
313 311 if not n1: return localpath(n2)
314 312 if os.path.isabs(n1):
315 313 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
316 314 return os.path.join(root, localpath(n2))
317 315 n2 = '/'.join((pconvert(root), n2))
318 316 a, b = splitpath(n1), n2.split('/')
319 317 a.reverse()
320 318 b.reverse()
321 319 while a and b and a[-1] == b[-1]:
322 320 a.pop()
323 321 b.pop()
324 322 b.reverse()
325 323 return os.sep.join((['..'] * len(a)) + b) or '.'
326 324
327 325 def canonpath(root, cwd, myname):
328 326 """return the canonical path of myname, given cwd and root"""
329 327 if root == os.sep:
330 328 rootsep = os.sep
331 329 elif endswithsep(root):
332 330 rootsep = root
333 331 else:
334 332 rootsep = root + os.sep
335 333 name = myname
336 334 if not os.path.isabs(name):
337 335 name = os.path.join(root, cwd, name)
338 336 name = os.path.normpath(name)
339 337 audit_path = path_auditor(root)
340 338 if name != rootsep and name.startswith(rootsep):
341 339 name = name[len(rootsep):]
342 340 audit_path(name)
343 341 return pconvert(name)
344 342 elif name == root:
345 343 return ''
346 344 else:
347 345 # Determine whether `name' is in the hierarchy at or beneath `root',
348 346 # by iterating name=dirname(name) until that causes no change (can't
349 347 # check name == '/', because that doesn't work on windows). For each
350 348 # `name', compare dev/inode numbers. If they match, the list `rel'
351 349 # holds the reversed list of components making up the relative file
352 350 # name we want.
353 351 root_st = os.stat(root)
354 352 rel = []
355 353 while True:
356 354 try:
357 355 name_st = os.stat(name)
358 356 except OSError:
359 357 break
360 358 if samestat(name_st, root_st):
361 359 if not rel:
362 360 # name was actually the same as root (maybe a symlink)
363 361 return ''
364 362 rel.reverse()
365 363 name = os.path.join(*rel)
366 364 audit_path(name)
367 365 return pconvert(name)
368 366 dirname, basename = os.path.split(name)
369 367 rel.append(basename)
370 368 if dirname == name:
371 369 break
372 370 name = dirname
373 371
374 372 raise Abort('%s not under root' % myname)
375 373
376 374 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
377 375 """build a function to match a set of file patterns
378 376
379 377 arguments:
380 378 canonroot - the canonical root of the tree you're matching against
381 379 cwd - the current working directory, if relevant
382 380 names - patterns to find
383 381 inc - patterns to include
384 382 exc - patterns to exclude
385 383 dflt_pat - if a pattern in names has no explicit type, assume this one
386 384 src - where these patterns came from (e.g. .hgignore)
387 385
388 386 a pattern is one of:
389 387 'glob:<glob>' - a glob relative to cwd
390 388 're:<regexp>' - a regular expression
391 389 'path:<path>' - a path relative to canonroot
392 390 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
393 391 'relpath:<path>' - a path relative to cwd
394 392 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
395 393 '<something>' - one of the cases above, selected by the dflt_pat argument
396 394
397 395 returns:
398 396 a 3-tuple containing
399 397 - list of roots (places where one should start a recursive walk of the fs);
400 398 this often matches the explicit non-pattern names passed in, but also
401 399 includes the initial part of glob: patterns that has no glob characters
402 400 - a bool match(filename) function
403 401 - a bool indicating if any patterns were passed in
404 402 """
405 403
406 404 # a common case: no patterns at all
407 405 if not names and not inc and not exc:
408 406 return [], always, False
409 407
410 408 def contains_glob(name):
411 409 for c in name:
412 410 if c in _globchars: return True
413 411 return False
414 412
415 413 def regex(kind, name, tail):
416 414 '''convert a pattern into a regular expression'''
417 415 if not name:
418 416 return ''
419 417 if kind == 're':
420 418 return name
421 419 elif kind == 'path':
422 420 return '^' + re.escape(name) + '(?:/|$)'
423 421 elif kind == 'relglob':
424 422 return globre(name, '(?:|.*/)', tail)
425 423 elif kind == 'relpath':
426 424 return re.escape(name) + '(?:/|$)'
427 425 elif kind == 'relre':
428 426 if name.startswith('^'):
429 427 return name
430 428 return '.*' + name
431 429 return globre(name, '', tail)
432 430
433 431 def matchfn(pats, tail):
434 432 """build a matching function from a set of patterns"""
435 433 if not pats:
436 434 return
437 435 try:
438 436 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
439 437 if len(pat) > 20000:
440 438 raise OverflowError()
441 439 return re.compile(pat).match
442 440 except OverflowError:
443 441 # We're using a Python with a tiny regex engine and we
444 442 # made it explode, so we'll divide the pattern list in two
445 443 # until it works
446 444 l = len(pats)
447 445 if l < 2:
448 446 raise
449 447 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
450 448 return lambda s: a(s) or b(s)
451 449 except re.error:
452 450 for k, p in pats:
453 451 try:
454 452 re.compile('(?:%s)' % regex(k, p, tail))
455 453 except re.error:
456 454 if src:
457 455 raise Abort("%s: invalid pattern (%s): %s" %
458 456 (src, k, p))
459 457 else:
460 458 raise Abort("invalid pattern (%s): %s" % (k, p))
461 459 raise Abort("invalid pattern")
462 460
463 461 def globprefix(pat):
464 462 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
465 463 root = []
466 464 for p in pat.split('/'):
467 465 if contains_glob(p): break
468 466 root.append(p)
469 467 return '/'.join(root) or '.'
470 468
471 469 def normalizepats(names, default):
472 470 pats = []
473 471 roots = []
474 472 anypats = False
475 473 for kind, name in [patkind(p, default) for p in names]:
476 474 if kind in ('glob', 'relpath'):
477 475 name = canonpath(canonroot, cwd, name)
478 476 elif kind in ('relglob', 'path'):
479 477 name = normpath(name)
480 478
481 479 pats.append((kind, name))
482 480
483 481 if kind in ('glob', 're', 'relglob', 'relre'):
484 482 anypats = True
485 483
486 484 if kind == 'glob':
487 485 root = globprefix(name)
488 486 roots.append(root)
489 487 elif kind in ('relpath', 'path'):
490 488 roots.append(name or '.')
491 489 elif kind == 'relglob':
492 490 roots.append('.')
493 491 return roots, pats, anypats
494 492
495 493 roots, pats, anypats = normalizepats(names, dflt_pat)
496 494
497 495 patmatch = matchfn(pats, '$') or always
498 496 incmatch = always
499 497 if inc:
500 498 dummy, inckinds, dummy = normalizepats(inc, 'glob')
501 499 incmatch = matchfn(inckinds, '(?:/|$)')
502 500 excmatch = never
503 501 if exc:
504 502 dummy, exckinds, dummy = normalizepats(exc, 'glob')
505 503 excmatch = matchfn(exckinds, '(?:/|$)')
506 504
507 505 if not names and inc and not exc:
508 506 # common case: hgignore patterns
509 507 match = incmatch
510 508 else:
511 509 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
512 510
513 511 return (roots, match, (inc or exc or anypats) and True)
514 512
515 513 _hgexecutable = None
516 514
517 515 def main_is_frozen():
518 516 """return True if we are a frozen executable.
519 517
520 518 The code supports py2exe (most common, Windows only) and tools/freeze
521 519 (portable, not much used).
522 520 """
523 521 return (hasattr(sys, "frozen") or # new py2exe
524 522 hasattr(sys, "importers") or # old py2exe
525 523 imp.is_frozen("__main__")) # tools/freeze
526 524
527 525 def hgexecutable():
528 526 """return location of the 'hg' executable.
529 527
530 528 Defaults to $HG or 'hg' in the search path.
531 529 """
532 530 if _hgexecutable is None:
533 531 hg = os.environ.get('HG')
534 532 if hg:
535 533 set_hgexecutable(hg)
536 534 elif main_is_frozen():
537 535 set_hgexecutable(sys.executable)
538 536 else:
539 537 set_hgexecutable(find_exe('hg') or 'hg')
540 538 return _hgexecutable
541 539
542 540 def set_hgexecutable(path):
543 541 """set location of the 'hg' executable"""
544 542 global _hgexecutable
545 543 _hgexecutable = path
546 544
547 545 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
548 546 '''enhanced shell command execution.
549 547 run with environment maybe modified, maybe in different dir.
550 548
551 549 if command fails and onerr is None, return status. if ui object,
552 550 print error message and return status, else raise onerr object as
553 551 exception.'''
554 552 def py2shell(val):
555 553 'convert python object into string that is useful to shell'
556 554 if val in (None, False):
557 555 return '0'
558 556 if val == True:
559 557 return '1'
560 558 return str(val)
561 559 oldenv = {}
562 560 for k in environ:
563 561 oldenv[k] = os.environ.get(k)
564 562 if cwd is not None:
565 563 oldcwd = os.getcwd()
566 564 origcmd = cmd
567 565 if os.name == 'nt':
568 566 cmd = '"%s"' % cmd
569 567 try:
570 568 for k, v in environ.iteritems():
571 569 os.environ[k] = py2shell(v)
572 570 os.environ['HG'] = hgexecutable()
573 571 if cwd is not None and oldcwd != cwd:
574 572 os.chdir(cwd)
575 573 rc = os.system(cmd)
576 574 if sys.platform == 'OpenVMS' and rc & 1:
577 575 rc = 0
578 576 if rc and onerr:
579 577 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
580 578 explain_exit(rc)[0])
581 579 if errprefix:
582 580 errmsg = '%s: %s' % (errprefix, errmsg)
583 581 try:
584 582 onerr.warn(errmsg + '\n')
585 583 except AttributeError:
586 584 raise onerr(errmsg)
587 585 return rc
588 586 finally:
589 587 for k, v in oldenv.iteritems():
590 588 if v is None:
591 589 del os.environ[k]
592 590 else:
593 591 os.environ[k] = v
594 592 if cwd is not None and oldcwd != cwd:
595 593 os.chdir(oldcwd)
596 594
597 595 def checksignature(func):
598 596 '''wrap a function with code to check for calling errors'''
599 597 def check(*args, **kwargs):
600 598 try:
601 599 return func(*args, **kwargs)
602 600 except TypeError:
603 601 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
604 602 raise error.SignatureError
605 603 raise
606 604
607 605 return check
608 606
609 607 # os.path.lexists is not available on python2.3
610 608 def lexists(filename):
611 609 "test whether a file with this name exists. does not follow symlinks"
612 610 try:
613 611 os.lstat(filename)
614 612 except:
615 613 return False
616 614 return True
617 615
618 616 def rename(src, dst):
619 617 """forcibly rename a file"""
620 618 try:
621 619 os.rename(src, dst)
622 620 except OSError, err: # FIXME: check err (EEXIST ?)
623 621
624 622 # On windows, rename to existing file is not allowed, so we
625 623 # must delete destination first. But if a file is open, unlink
626 624 # schedules it for delete but does not delete it. Rename
627 625 # happens immediately even for open files, so we rename
628 626 # destination to a temporary name, then delete that. Then
629 627 # rename is safe to do.
630 628 # The temporary name is chosen at random to avoid the situation
631 629 # where a file is left lying around from a previous aborted run.
632 630 # The usual race condition this introduces can't be avoided as
633 631 # we need the name to rename into, and not the file itself. Due
634 632 # to the nature of the operation however, any races will at worst
635 633 # lead to the rename failing and the current operation aborting.
636 634
637 635 def tempname(prefix):
638 636 for tries in xrange(10):
639 637 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
640 638 if not os.path.exists(temp):
641 639 return temp
642 640 raise IOError, (errno.EEXIST, "No usable temporary filename found")
643 641
644 642 temp = tempname(dst)
645 643 os.rename(dst, temp)
646 644 os.unlink(temp)
647 645 os.rename(src, dst)
648 646
649 647 def unlink(f):
650 648 """unlink and remove the directory if it is empty"""
651 649 os.unlink(f)
652 650 # try removing directories that might now be empty
653 651 try:
654 652 os.removedirs(os.path.dirname(f))
655 653 except OSError:
656 654 pass
657 655
658 656 def copyfile(src, dest):
659 657 "copy a file, preserving mode and atime/mtime"
660 658 if os.path.islink(src):
661 659 try:
662 660 os.unlink(dest)
663 661 except:
664 662 pass
665 663 os.symlink(os.readlink(src), dest)
666 664 else:
667 665 try:
668 666 shutil.copyfile(src, dest)
669 667 shutil.copystat(src, dest)
670 668 except shutil.Error, inst:
671 669 raise Abort(str(inst))
672 670
673 671 def copyfiles(src, dst, hardlink=None):
674 672 """Copy a directory tree using hardlinks if possible"""
675 673
676 674 if hardlink is None:
677 675 hardlink = (os.stat(src).st_dev ==
678 676 os.stat(os.path.dirname(dst)).st_dev)
679 677
680 678 if os.path.isdir(src):
681 679 os.mkdir(dst)
682 680 for name, kind in osutil.listdir(src):
683 681 srcname = os.path.join(src, name)
684 682 dstname = os.path.join(dst, name)
685 683 copyfiles(srcname, dstname, hardlink)
686 684 else:
687 685 if hardlink:
688 686 try:
689 687 os_link(src, dst)
690 688 except (IOError, OSError):
691 689 hardlink = False
692 690 shutil.copy(src, dst)
693 691 else:
694 692 shutil.copy(src, dst)
695 693
696 694 class path_auditor(object):
697 695 '''ensure that a filesystem path contains no banned components.
698 696 the following properties of a path are checked:
699 697
700 698 - under top-level .hg
701 699 - starts at the root of a windows drive
702 700 - contains ".."
703 701 - traverses a symlink (e.g. a/symlink_here/b)
704 702 - inside a nested repository'''
705 703
706 704 def __init__(self, root):
707 705 self.audited = set()
708 706 self.auditeddir = set()
709 707 self.root = root
710 708
711 709 def __call__(self, path):
712 710 if path in self.audited:
713 711 return
714 712 normpath = os.path.normcase(path)
715 713 parts = splitpath(normpath)
716 714 if (os.path.splitdrive(path)[0]
717 715 or parts[0].lower() in ('.hg', '.hg.', '')
718 716 or os.pardir in parts):
719 717 raise Abort(_("path contains illegal component: %s") % path)
720 718 if '.hg' in path.lower():
721 719 lparts = [p.lower() for p in parts]
722 720 for p in '.hg', '.hg.':
723 721 if p in lparts[1:]:
724 722 pos = lparts.index(p)
725 723 base = os.path.join(*parts[:pos])
726 724 raise Abort(_('path %r is inside repo %r') % (path, base))
727 725 def check(prefix):
728 726 curpath = os.path.join(self.root, prefix)
729 727 try:
730 728 st = os.lstat(curpath)
731 729 except OSError, err:
732 730 # EINVAL can be raised as invalid path syntax under win32.
733 731 # They must be ignored for patterns can be checked too.
734 732 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
735 733 raise
736 734 else:
737 735 if stat.S_ISLNK(st.st_mode):
738 736 raise Abort(_('path %r traverses symbolic link %r') %
739 737 (path, prefix))
740 738 elif (stat.S_ISDIR(st.st_mode) and
741 739 os.path.isdir(os.path.join(curpath, '.hg'))):
742 740 raise Abort(_('path %r is inside repo %r') %
743 741 (path, prefix))
744 742 parts.pop()
745 743 prefixes = []
746 744 for n in range(len(parts)):
747 745 prefix = os.sep.join(parts)
748 746 if prefix in self.auditeddir:
749 747 break
750 748 check(prefix)
751 749 prefixes.append(prefix)
752 750 parts.pop()
753 751
754 752 self.audited.add(path)
755 753 # only add prefixes to the cache after checking everything: we don't
756 754 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
757 755 self.auditeddir.update(prefixes)
758 756
759 757 def nlinks(pathname):
760 758 """Return number of hardlinks for the given file."""
761 759 return os.lstat(pathname).st_nlink
762 760
763 761 if hasattr(os, 'link'):
764 762 os_link = os.link
765 763 else:
766 764 def os_link(src, dst):
767 765 raise OSError(0, _("Hardlinks not supported"))
768 766
769 767 def lookup_reg(key, name=None, scope=None):
770 768 return None
771 769
772 770 if os.name == 'nt':
773 771 from windows import *
774 772 def expand_glob(pats):
775 773 '''On Windows, expand the implicit globs in a list of patterns'''
776 774 ret = []
777 775 for p in pats:
778 776 kind, name = patkind(p, None)
779 777 if kind is None:
780 778 globbed = glob.glob(name)
781 779 if globbed:
782 780 ret.extend(globbed)
783 781 continue
784 782 # if we couldn't expand the glob, just keep it around
785 783 ret.append(p)
786 784 return ret
787 785 else:
788 786 from posix import *
789 787
790 788 def makelock(info, pathname):
791 789 try:
792 790 return os.symlink(info, pathname)
793 791 except OSError, why:
794 792 if why.errno == errno.EEXIST:
795 793 raise
796 794 except AttributeError: # no symlink in os
797 795 pass
798 796
799 797 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
800 798 os.write(ld, info)
801 799 os.close(ld)
802 800
803 801 def readlock(pathname):
804 802 try:
805 803 return os.readlink(pathname)
806 804 except OSError, why:
807 805 if why.errno not in (errno.EINVAL, errno.ENOSYS):
808 806 raise
809 807 except AttributeError: # no symlink in os
810 808 pass
811 809 return posixfile(pathname).read()
812 810
813 811 def fstat(fp):
814 812 '''stat file object that may not have fileno method.'''
815 813 try:
816 814 return os.fstat(fp.fileno())
817 815 except AttributeError:
818 816 return os.stat(fp.name)
819 817
820 818 # File system features
821 819
822 820 def checkcase(path):
823 821 """
824 822 Check whether the given path is on a case-sensitive filesystem
825 823
826 824 Requires a path (like /foo/.hg) ending with a foldable final
827 825 directory component.
828 826 """
829 827 s1 = os.stat(path)
830 828 d, b = os.path.split(path)
831 829 p2 = os.path.join(d, b.upper())
832 830 if path == p2:
833 831 p2 = os.path.join(d, b.lower())
834 832 try:
835 833 s2 = os.stat(p2)
836 834 if s2 == s1:
837 835 return False
838 836 return True
839 837 except:
840 838 return True
841 839
842 840 _fspathcache = {}
843 841 def fspath(name, root):
844 842 '''Get name in the case stored in the filesystem
845 843
846 844 The name is either relative to root, or it is an absolute path starting
847 845 with root. Note that this function is unnecessary, and should not be
848 846 called, for case-sensitive filesystems (simply because it's expensive).
849 847 '''
850 848 # If name is absolute, make it relative
851 849 if name.lower().startswith(root.lower()):
852 850 l = len(root)
853 851 if name[l] == os.sep or name[l] == os.altsep:
854 852 l = l + 1
855 853 name = name[l:]
856 854
857 855 if not os.path.exists(os.path.join(root, name)):
858 856 return None
859 857
860 858 seps = os.sep
861 859 if os.altsep:
862 860 seps = seps + os.altsep
863 861 # Protect backslashes. This gets silly very quickly.
864 862 seps.replace('\\','\\\\')
865 863 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
866 864 dir = os.path.normcase(os.path.normpath(root))
867 865 result = []
868 866 for part, sep in pattern.findall(name):
869 867 if sep:
870 868 result.append(sep)
871 869 continue
872 870
873 871 if dir not in _fspathcache:
874 872 _fspathcache[dir] = os.listdir(dir)
875 873 contents = _fspathcache[dir]
876 874
877 875 lpart = part.lower()
878 876 for n in contents:
879 877 if n.lower() == lpart:
880 878 result.append(n)
881 879 break
882 880 else:
883 881 # Cannot happen, as the file exists!
884 882 result.append(part)
885 883 dir = os.path.join(dir, lpart)
886 884
887 885 return ''.join(result)
888 886
889 887 def checkexec(path):
890 888 """
891 889 Check whether the given path is on a filesystem with UNIX-like exec flags
892 890
893 891 Requires a directory (like /foo/.hg)
894 892 """
895 893
896 894 # VFAT on some Linux versions can flip mode but it doesn't persist
897 895 # a FS remount. Frequently we can detect it if files are created
898 896 # with exec bit on.
899 897
900 898 try:
901 899 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
902 900 fh, fn = tempfile.mkstemp("", "", path)
903 901 try:
904 902 os.close(fh)
905 903 m = os.stat(fn).st_mode & 0777
906 904 new_file_has_exec = m & EXECFLAGS
907 905 os.chmod(fn, m ^ EXECFLAGS)
908 906 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
909 907 finally:
910 908 os.unlink(fn)
911 909 except (IOError, OSError):
912 910 # we don't care, the user probably won't be able to commit anyway
913 911 return False
914 912 return not (new_file_has_exec or exec_flags_cannot_flip)
915 913
916 914 def checklink(path):
917 915 """check whether the given path is on a symlink-capable filesystem"""
918 916 # mktemp is not racy because symlink creation will fail if the
919 917 # file already exists
920 918 name = tempfile.mktemp(dir=path)
921 919 try:
922 920 os.symlink(".", name)
923 921 os.unlink(name)
924 922 return True
925 923 except (OSError, AttributeError):
926 924 return False
927 925
928 926 def needbinarypatch():
929 927 """return True if patches should be applied in binary mode by default."""
930 928 return os.name == 'nt'
931 929
932 930 def endswithsep(path):
933 931 '''Check path ends with os.sep or os.altsep.'''
934 932 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
935 933
936 934 def splitpath(path):
937 935 '''Split path by os.sep.
938 936 Note that this function does not use os.altsep because this is
939 937 an alternative of simple "xxx.split(os.sep)".
940 938 It is recommended to use os.path.normpath() before using this
941 939 function if needed.'''
942 940 return path.split(os.sep)
943 941
944 942 def gui():
945 943 '''Are we running in a GUI?'''
946 944 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
947 945
948 946 def mktempcopy(name, emptyok=False, createmode=None):
949 947 """Create a temporary file with the same contents from name
950 948
951 949 The permission bits are copied from the original file.
952 950
953 951 If the temporary file is going to be truncated immediately, you
954 952 can use emptyok=True as an optimization.
955 953
956 954 Returns the name of the temporary file.
957 955 """
958 956 d, fn = os.path.split(name)
959 957 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
960 958 os.close(fd)
961 959 # Temporary files are created with mode 0600, which is usually not
962 960 # what we want. If the original file already exists, just copy
963 961 # its mode. Otherwise, manually obey umask.
964 962 try:
965 963 st_mode = os.lstat(name).st_mode & 0777
966 964 except OSError, inst:
967 965 if inst.errno != errno.ENOENT:
968 966 raise
969 967 st_mode = createmode
970 968 if st_mode is None:
971 969 st_mode = ~umask
972 970 st_mode &= 0666
973 971 os.chmod(temp, st_mode)
974 972 if emptyok:
975 973 return temp
976 974 try:
977 975 try:
978 976 ifp = posixfile(name, "rb")
979 977 except IOError, inst:
980 978 if inst.errno == errno.ENOENT:
981 979 return temp
982 980 if not getattr(inst, 'filename', None):
983 981 inst.filename = name
984 982 raise
985 983 ofp = posixfile(temp, "wb")
986 984 for chunk in filechunkiter(ifp):
987 985 ofp.write(chunk)
988 986 ifp.close()
989 987 ofp.close()
990 988 except:
991 989 try: os.unlink(temp)
992 990 except: pass
993 991 raise
994 992 return temp
995 993
996 994 class atomictempfile(posixfile):
997 995 """file-like object that atomically updates a file
998 996
999 997 All writes will be redirected to a temporary copy of the original
1000 998 file. When rename is called, the copy is renamed to the original
1001 999 name, making the changes visible.
1002 1000 """
1003 1001 def __init__(self, name, mode, createmode):
1004 1002 self.__name = name
1005 1003 self.temp = mktempcopy(name, emptyok=('w' in mode),
1006 1004 createmode=createmode)
1007 1005 posixfile.__init__(self, self.temp, mode)
1008 1006
1009 1007 def rename(self):
1010 1008 if not self.closed:
1011 1009 posixfile.close(self)
1012 1010 rename(self.temp, localpath(self.__name))
1013 1011
1014 1012 def __del__(self):
1015 1013 if not self.closed:
1016 1014 try:
1017 1015 os.unlink(self.temp)
1018 1016 except: pass
1019 1017 posixfile.close(self)
1020 1018
1021 1019 def makedirs(name, mode=None):
1022 1020 """recursive directory creation with parent mode inheritance"""
1023 1021 try:
1024 1022 os.mkdir(name)
1025 1023 if mode is not None:
1026 1024 os.chmod(name, mode)
1027 1025 return
1028 1026 except OSError, err:
1029 1027 if err.errno == errno.EEXIST:
1030 1028 return
1031 1029 if err.errno != errno.ENOENT:
1032 1030 raise
1033 1031 parent = os.path.abspath(os.path.dirname(name))
1034 1032 makedirs(parent, mode)
1035 1033 makedirs(name, mode)
1036 1034
1037 1035 class opener(object):
1038 1036 """Open files relative to a base directory
1039 1037
1040 1038 This class is used to hide the details of COW semantics and
1041 1039 remote file access from higher level code.
1042 1040 """
1043 1041 def __init__(self, base, audit=True):
1044 1042 self.base = base
1045 1043 if audit:
1046 1044 self.audit_path = path_auditor(base)
1047 1045 else:
1048 1046 self.audit_path = always
1049 1047 self.createmode = None
1050 1048
1051 1049 def __getattr__(self, name):
1052 1050 if name == '_can_symlink':
1053 1051 self._can_symlink = checklink(self.base)
1054 1052 return self._can_symlink
1055 1053 raise AttributeError(name)
1056 1054
1057 1055 def _fixfilemode(self, name):
1058 1056 if self.createmode is None:
1059 1057 return
1060 1058 os.chmod(name, self.createmode & 0666)
1061 1059
1062 1060 def __call__(self, path, mode="r", text=False, atomictemp=False):
1063 1061 self.audit_path(path)
1064 1062 f = os.path.join(self.base, path)
1065 1063
1066 1064 if not text and "b" not in mode:
1067 1065 mode += "b" # for that other OS
1068 1066
1069 1067 nlink = -1
1070 1068 if mode not in ("r", "rb"):
1071 1069 try:
1072 1070 nlink = nlinks(f)
1073 1071 except OSError:
1074 1072 nlink = 0
1075 1073 d = os.path.dirname(f)
1076 1074 if not os.path.isdir(d):
1077 1075 makedirs(d, self.createmode)
1078 1076 if atomictemp:
1079 1077 return atomictempfile(f, mode, self.createmode)
1080 1078 if nlink > 1:
1081 1079 rename(mktempcopy(f), f)
1082 1080 fp = posixfile(f, mode)
1083 1081 if nlink == 0:
1084 1082 self._fixfilemode(f)
1085 1083 return fp
1086 1084
1087 1085 def symlink(self, src, dst):
1088 1086 self.audit_path(dst)
1089 1087 linkname = os.path.join(self.base, dst)
1090 1088 try:
1091 1089 os.unlink(linkname)
1092 1090 except OSError:
1093 1091 pass
1094 1092
1095 1093 dirname = os.path.dirname(linkname)
1096 1094 if not os.path.exists(dirname):
1097 1095 makedirs(dirname, self.createmode)
1098 1096
1099 1097 if self._can_symlink:
1100 1098 try:
1101 1099 os.symlink(src, linkname)
1102 1100 except OSError, err:
1103 1101 raise OSError(err.errno, _('could not symlink to %r: %s') %
1104 1102 (src, err.strerror), linkname)
1105 1103 else:
1106 1104 f = self(dst, "w")
1107 1105 f.write(src)
1108 1106 f.close()
1109 1107 self._fixfilemode(dst)
1110 1108
1111 1109 class chunkbuffer(object):
1112 1110 """Allow arbitrary sized chunks of data to be efficiently read from an
1113 1111 iterator over chunks of arbitrary size."""
1114 1112
1115 1113 def __init__(self, in_iter):
1116 1114 """in_iter is the iterator that's iterating over the input chunks.
1117 1115 targetsize is how big a buffer to try to maintain."""
1118 1116 self.iter = iter(in_iter)
1119 1117 self.buf = ''
1120 1118 self.targetsize = 2**16
1121 1119
1122 1120 def read(self, l):
1123 1121 """Read L bytes of data from the iterator of chunks of data.
1124 1122 Returns less than L bytes if the iterator runs dry."""
1125 1123 if l > len(self.buf) and self.iter:
1126 1124 # Clamp to a multiple of self.targetsize
1127 1125 targetsize = max(l, self.targetsize)
1128 1126 collector = cStringIO.StringIO()
1129 1127 collector.write(self.buf)
1130 1128 collected = len(self.buf)
1131 1129 for chunk in self.iter:
1132 1130 collector.write(chunk)
1133 1131 collected += len(chunk)
1134 1132 if collected >= targetsize:
1135 1133 break
1136 1134 if collected < targetsize:
1137 1135 self.iter = False
1138 1136 self.buf = collector.getvalue()
1139 1137 if len(self.buf) == l:
1140 1138 s, self.buf = str(self.buf), ''
1141 1139 else:
1142 1140 s, self.buf = self.buf[:l], buffer(self.buf, l)
1143 1141 return s
1144 1142
1145 1143 def filechunkiter(f, size=65536, limit=None):
1146 1144 """Create a generator that produces the data in the file size
1147 1145 (default 65536) bytes at a time, up to optional limit (default is
1148 1146 to read all data). Chunks may be less than size bytes if the
1149 1147 chunk is the last chunk in the file, or the file is a socket or
1150 1148 some other type of file that sometimes reads less data than is
1151 1149 requested."""
1152 1150 assert size >= 0
1153 1151 assert limit is None or limit >= 0
1154 1152 while True:
1155 1153 if limit is None: nbytes = size
1156 1154 else: nbytes = min(limit, size)
1157 1155 s = nbytes and f.read(nbytes)
1158 1156 if not s: break
1159 1157 if limit: limit -= len(s)
1160 1158 yield s
1161 1159
1162 1160 def makedate():
1163 1161 lt = time.localtime()
1164 1162 if lt[8] == 1 and time.daylight:
1165 1163 tz = time.altzone
1166 1164 else:
1167 1165 tz = time.timezone
1168 1166 return time.mktime(lt), tz
1169 1167
1170 1168 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1171 1169 """represent a (unixtime, offset) tuple as a localized time.
1172 1170 unixtime is seconds since the epoch, and offset is the time zone's
1173 1171 number of seconds away from UTC. if timezone is false, do not
1174 1172 append time zone to string."""
1175 1173 t, tz = date or makedate()
1176 1174 if "%1" in format or "%2" in format:
1177 1175 sign = (tz > 0) and "-" or "+"
1178 1176 minutes = abs(tz) / 60
1179 1177 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1180 1178 format = format.replace("%2", "%02d" % (minutes % 60))
1181 1179 s = time.strftime(format, time.gmtime(float(t) - tz))
1182 1180 return s
1183 1181
1184 1182 def shortdate(date=None):
1185 1183 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1186 1184 return datestr(date, format='%Y-%m-%d')
1187 1185
1188 1186 def strdate(string, format, defaults=[]):
1189 1187 """parse a localized time string and return a (unixtime, offset) tuple.
1190 1188 if the string cannot be parsed, ValueError is raised."""
1191 1189 def timezone(string):
1192 1190 tz = string.split()[-1]
1193 1191 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1194 1192 sign = (tz[0] == "+") and 1 or -1
1195 1193 hours = int(tz[1:3])
1196 1194 minutes = int(tz[3:5])
1197 1195 return -sign * (hours * 60 + minutes) * 60
1198 1196 if tz == "GMT" or tz == "UTC":
1199 1197 return 0
1200 1198 return None
1201 1199
1202 1200 # NOTE: unixtime = localunixtime + offset
1203 1201 offset, date = timezone(string), string
1204 1202 if offset != None:
1205 1203 date = " ".join(string.split()[:-1])
1206 1204
1207 1205 # add missing elements from defaults
1208 1206 for part in defaults:
1209 1207 found = [True for p in part if ("%"+p) in format]
1210 1208 if not found:
1211 1209 date += "@" + defaults[part]
1212 1210 format += "@%" + part[0]
1213 1211
1214 1212 timetuple = time.strptime(date, format)
1215 1213 localunixtime = int(calendar.timegm(timetuple))
1216 1214 if offset is None:
1217 1215 # local timezone
1218 1216 unixtime = int(time.mktime(timetuple))
1219 1217 offset = unixtime - localunixtime
1220 1218 else:
1221 1219 unixtime = localunixtime + offset
1222 1220 return unixtime, offset
1223 1221
1224 1222 def parsedate(date, formats=None, defaults=None):
1225 1223 """parse a localized date/time string and return a (unixtime, offset) tuple.
1226 1224
1227 1225 The date may be a "unixtime offset" string or in one of the specified
1228 1226 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1229 1227 """
1230 1228 if not date:
1231 1229 return 0, 0
1232 1230 if isinstance(date, tuple) and len(date) == 2:
1233 1231 return date
1234 1232 if not formats:
1235 1233 formats = defaultdateformats
1236 1234 date = date.strip()
1237 1235 try:
1238 1236 when, offset = map(int, date.split(' '))
1239 1237 except ValueError:
1240 1238 # fill out defaults
1241 1239 if not defaults:
1242 1240 defaults = {}
1243 1241 now = makedate()
1244 1242 for part in "d mb yY HI M S".split():
1245 1243 if part not in defaults:
1246 1244 if part[0] in "HMS":
1247 1245 defaults[part] = "00"
1248 1246 else:
1249 1247 defaults[part] = datestr(now, "%" + part[0])
1250 1248
1251 1249 for format in formats:
1252 1250 try:
1253 1251 when, offset = strdate(date, format, defaults)
1254 1252 except (ValueError, OverflowError):
1255 1253 pass
1256 1254 else:
1257 1255 break
1258 1256 else:
1259 1257 raise Abort(_('invalid date: %r ') % date)
1260 1258 # validate explicit (probably user-specified) date and
1261 1259 # time zone offset. values must fit in signed 32 bits for
1262 1260 # current 32-bit linux runtimes. timezones go from UTC-12
1263 1261 # to UTC+14
1264 1262 if abs(when) > 0x7fffffff:
1265 1263 raise Abort(_('date exceeds 32 bits: %d') % when)
1266 1264 if offset < -50400 or offset > 43200:
1267 1265 raise Abort(_('impossible time zone offset: %d') % offset)
1268 1266 return when, offset
1269 1267
1270 1268 def matchdate(date):
1271 1269 """Return a function that matches a given date match specifier
1272 1270
1273 1271 Formats include:
1274 1272
1275 1273 '{date}' match a given date to the accuracy provided
1276 1274
1277 1275 '<{date}' on or before a given date
1278 1276
1279 1277 '>{date}' on or after a given date
1280 1278
1281 1279 """
1282 1280
1283 1281 def lower(date):
1284 1282 d = dict(mb="1", d="1")
1285 1283 return parsedate(date, extendeddateformats, d)[0]
1286 1284
1287 1285 def upper(date):
1288 1286 d = dict(mb="12", HI="23", M="59", S="59")
1289 1287 for days in "31 30 29".split():
1290 1288 try:
1291 1289 d["d"] = days
1292 1290 return parsedate(date, extendeddateformats, d)[0]
1293 1291 except:
1294 1292 pass
1295 1293 d["d"] = "28"
1296 1294 return parsedate(date, extendeddateformats, d)[0]
1297 1295
1298 1296 date = date.strip()
1299 1297 if date[0] == "<":
1300 1298 when = upper(date[1:])
1301 1299 return lambda x: x <= when
1302 1300 elif date[0] == ">":
1303 1301 when = lower(date[1:])
1304 1302 return lambda x: x >= when
1305 1303 elif date[0] == "-":
1306 1304 try:
1307 1305 days = int(date[1:])
1308 1306 except ValueError:
1309 1307 raise Abort(_("invalid day spec: %s") % date[1:])
1310 1308 when = makedate()[0] - days * 3600 * 24
1311 1309 return lambda x: x >= when
1312 1310 elif " to " in date:
1313 1311 a, b = date.split(" to ")
1314 1312 start, stop = lower(a), upper(b)
1315 1313 return lambda x: x >= start and x <= stop
1316 1314 else:
1317 1315 start, stop = lower(date), upper(date)
1318 1316 return lambda x: x >= start and x <= stop
1319 1317
1320 1318 def shortuser(user):
1321 1319 """Return a short representation of a user name or email address."""
1322 1320 f = user.find('@')
1323 1321 if f >= 0:
1324 1322 user = user[:f]
1325 1323 f = user.find('<')
1326 1324 if f >= 0:
1327 1325 user = user[f+1:]
1328 1326 f = user.find(' ')
1329 1327 if f >= 0:
1330 1328 user = user[:f]
1331 1329 f = user.find('.')
1332 1330 if f >= 0:
1333 1331 user = user[:f]
1334 1332 return user
1335 1333
1336 1334 def email(author):
1337 1335 '''get email of author.'''
1338 1336 r = author.find('>')
1339 1337 if r == -1: r = None
1340 1338 return author[author.find('<')+1:r]
1341 1339
1342 1340 def ellipsis(text, maxlength=400):
1343 1341 """Trim string to at most maxlength (default: 400) characters."""
1344 1342 if len(text) <= maxlength:
1345 1343 return text
1346 1344 else:
1347 1345 return "%s..." % (text[:maxlength-3])
1348 1346
1349 1347 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1350 1348 '''yield every hg repository under path, recursively.'''
1351 1349 def errhandler(err):
1352 1350 if err.filename == path:
1353 1351 raise err
1354 1352 if followsym and hasattr(os.path, 'samestat'):
1355 1353 def _add_dir_if_not_there(dirlst, dirname):
1356 1354 match = False
1357 1355 samestat = os.path.samestat
1358 1356 dirstat = os.stat(dirname)
1359 1357 for lstdirstat in dirlst:
1360 1358 if samestat(dirstat, lstdirstat):
1361 1359 match = True
1362 1360 break
1363 1361 if not match:
1364 1362 dirlst.append(dirstat)
1365 1363 return not match
1366 1364 else:
1367 1365 followsym = False
1368 1366
1369 1367 if (seen_dirs is None) and followsym:
1370 1368 seen_dirs = []
1371 1369 _add_dir_if_not_there(seen_dirs, path)
1372 1370 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1373 1371 if '.hg' in dirs:
1374 1372 yield root # found a repository
1375 1373 qroot = os.path.join(root, '.hg', 'patches')
1376 1374 if os.path.isdir(os.path.join(qroot, '.hg')):
1377 1375 yield qroot # we have a patch queue repo here
1378 1376 if recurse:
1379 1377 # avoid recursing inside the .hg directory
1380 1378 dirs.remove('.hg')
1381 1379 else:
1382 1380 dirs[:] = [] # don't descend further
1383 1381 elif followsym:
1384 1382 newdirs = []
1385 1383 for d in dirs:
1386 1384 fname = os.path.join(root, d)
1387 1385 if _add_dir_if_not_there(seen_dirs, fname):
1388 1386 if os.path.islink(fname):
1389 1387 for hgname in walkrepos(fname, True, seen_dirs):
1390 1388 yield hgname
1391 1389 else:
1392 1390 newdirs.append(d)
1393 1391 dirs[:] = newdirs
1394 1392
1395 1393 _rcpath = None
1396 1394
1397 1395 def os_rcpath():
1398 1396 '''return default os-specific hgrc search path'''
1399 1397 path = system_rcpath()
1400 1398 path.extend(user_rcpath())
1401 1399 path = [os.path.normpath(f) for f in path]
1402 1400 return path
1403 1401
1404 1402 def rcpath():
1405 1403 '''return hgrc search path. if env var HGRCPATH is set, use it.
1406 1404 for each item in path, if directory, use files ending in .rc,
1407 1405 else use item.
1408 1406 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1409 1407 if no HGRCPATH, use default os-specific path.'''
1410 1408 global _rcpath
1411 1409 if _rcpath is None:
1412 1410 if 'HGRCPATH' in os.environ:
1413 1411 _rcpath = []
1414 1412 for p in os.environ['HGRCPATH'].split(os.pathsep):
1415 1413 if not p: continue
1416 1414 if os.path.isdir(p):
1417 1415 for f, kind in osutil.listdir(p):
1418 1416 if f.endswith('.rc'):
1419 1417 _rcpath.append(os.path.join(p, f))
1420 1418 else:
1421 1419 _rcpath.append(p)
1422 1420 else:
1423 1421 _rcpath = os_rcpath()
1424 1422 return _rcpath
1425 1423
1426 1424 def bytecount(nbytes):
1427 1425 '''return byte count formatted as readable string, with units'''
1428 1426
1429 1427 units = (
1430 1428 (100, 1<<30, _('%.0f GB')),
1431 1429 (10, 1<<30, _('%.1f GB')),
1432 1430 (1, 1<<30, _('%.2f GB')),
1433 1431 (100, 1<<20, _('%.0f MB')),
1434 1432 (10, 1<<20, _('%.1f MB')),
1435 1433 (1, 1<<20, _('%.2f MB')),
1436 1434 (100, 1<<10, _('%.0f KB')),
1437 1435 (10, 1<<10, _('%.1f KB')),
1438 1436 (1, 1<<10, _('%.2f KB')),
1439 1437 (1, 1, _('%.0f bytes')),
1440 1438 )
1441 1439
1442 1440 for multiplier, divisor, format in units:
1443 1441 if nbytes >= divisor * multiplier:
1444 1442 return format % (nbytes / float(divisor))
1445 1443 return units[-1][2] % nbytes
1446 1444
1447 1445 def drop_scheme(scheme, path):
1448 1446 sc = scheme + ':'
1449 1447 if path.startswith(sc):
1450 1448 path = path[len(sc):]
1451 1449 if path.startswith('//'):
1452 1450 path = path[2:]
1453 1451 return path
1454 1452
1455 1453 def uirepr(s):
1456 1454 # Avoid double backslash in Windows path repr()
1457 1455 return repr(s).replace('\\\\', '\\')
1458 1456
1459 1457 def termwidth():
1460 1458 if 'COLUMNS' in os.environ:
1461 1459 try:
1462 1460 return int(os.environ['COLUMNS'])
1463 1461 except ValueError:
1464 1462 pass
1465 1463 try:
1466 1464 import termios, array, fcntl
1467 1465 for dev in (sys.stdout, sys.stdin):
1468 1466 try:
1469 1467 fd = dev.fileno()
1470 1468 if not os.isatty(fd):
1471 1469 continue
1472 1470 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1473 1471 return array.array('h', arri)[1]
1474 1472 except ValueError:
1475 1473 pass
1476 1474 except ImportError:
1477 1475 pass
1478 1476 return 80
1479 1477
1480 1478 def iterlines(iterator):
1481 1479 for chunk in iterator:
1482 1480 for line in chunk.splitlines():
1483 1481 yield line
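A side note on the two helpers this changeset touches: on their first call they rebind the module-level name (`global md5; md5 = _md5`), so the try/except import logic runs at most once and every later call goes directly to the resolved constructor. A small usage sketch of that behaviour, assuming hashlib is available (Python 2.5+) and Mercurial is importable:

    from mercurial import util
    import hashlib

    digest = util.md5('some data').hexdigest()  # first call resolves and rebinds util.md5
    assert util.md5 is hashlib.md5              # later calls bypass the wrapper entirely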