move util.Abort to error.py
Matt Mackall
r7947:a454eeb1 default
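The change itself is small: the Abort class definition moves out of util.py and into error.py (which deliberately imports nothing), while util.py keeps the old name bound to the new class, so existing util.Abort callers and except-handlers keep working. Below is a minimal sketch of that aliasing pattern, written in the Python 2 style of the code that follows; the name Abort_alias is only illustrative and stands in for what util.py now does with "Abort = error.Abort".

# Sketch of the compatibility alias introduced by this changeset (Python 2).
# "Abort_alias" is an illustrative stand-in for util.Abort after the change.
class Abort(Exception):
    """Raised if a command needs to print an error and exit."""

Abort_alias = Abort                 # util.py: Abort = error.Abort

try:
    raise Abort_alias("no repository found")
except Abort, inst:                 # alias and original are the same class
    print "abort: %s" % inst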
error.py
@@ -1,64 +1,67 @@
1 1 """
2 2 error.py - Mercurial exceptions
3 3
4 4 This allows us to catch exceptions at higher levels without forcing imports
5 5
6 6 Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10 """
11 11
12 12 # Do not import anything here, please
13 13
14 14 class RevlogError(Exception):
15 15 pass
16 16
17 17 class LookupError(RevlogError, KeyError):
18 18 def __init__(self, name, index, message):
19 19 self.name = name
20 20 if isinstance(name, str) and len(name) == 20:
21 21 from node import short
22 22 name = short(name)
23 23 RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
24 24
25 25 def __str__(self):
26 26 return RevlogError.__str__(self)
27 27
28 28 class ParseError(Exception):
29 29 """Exception raised on errors in parsing the command line."""
30 30
31 31 class RepoError(Exception):
32 32 pass
33 33
34 34 class CapabilityError(RepoError):
35 35 pass
36 36
37 37 class LockError(IOError):
38 38 def __init__(self, errno, strerror, filename, desc):
39 39 IOError.__init__(self, errno, strerror, filename)
40 40 self.desc = desc
41 41
42 42 class LockHeld(LockError):
43 43 def __init__(self, errno, filename, desc, locker):
44 44 LockError.__init__(self, errno, 'Lock held', filename, desc)
45 45 self.locker = locker
46 46
47 47 class LockUnavailable(LockError):
48 48 pass
49 49
50 50 class ResponseError(Exception):
51 51 """Raised to print an error with part of output and exit."""
52 52
53 53 class UnknownCommand(Exception):
54 54 """Exception raised if command is not in the command table."""
55 55
56 56 class AmbiguousCommand(Exception):
57 57 """Exception raised if command shortcut matches more than one command."""
58 58
59 59 # derived from KeyboardInterrupt to simplify some breakout code
60 60 class SignalInterrupt(KeyboardInterrupt):
61 61 """Exception raised on SIGTERM and SIGHUP."""
62 62
63 63 class SignatureError(Exception):
64 64 pass
65
66 class Abort(Exception):
67 """Raised if a command needs to print an error and exit."""
util.py
@@ -1,1568 +1,1567 @@
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 17 import os, stat, threading, time, calendar, ConfigParser, locale, glob, osutil
18 18 import imp, unicodedata
19 19
20 20 # Python compatibility
21 21
22 22 try:
23 23 set = set
24 24 frozenset = frozenset
25 25 except NameError:
26 26 from sets import Set as set, ImmutableSet as frozenset
27 27
28 28 _md5 = None
29 29 def md5(s):
30 30 global _md5
31 31 if _md5 is None:
32 32 try:
33 33 import hashlib
34 34 _md5 = hashlib.md5
35 35 except ImportError:
36 36 import md5
37 37 _md5 = md5.md5
38 38 return _md5(s)
39 39
40 40 _sha1 = None
41 41 def sha1(s):
42 42 global _sha1
43 43 if _sha1 is None:
44 44 try:
45 45 import hashlib
46 46 _sha1 = hashlib.sha1
47 47 except ImportError:
48 48 import sha
49 49 _sha1 = sha.sha
50 50 return _sha1(s)
51 51
52 52 try:
53 53 import subprocess
54 54 subprocess.Popen # trigger ImportError early
55 55 closefds = os.name == 'posix'
56 56 def popen2(cmd, mode='t', bufsize=-1):
57 57 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
58 58 close_fds=closefds,
59 59 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
60 60 return p.stdin, p.stdout
61 61 def popen3(cmd, mode='t', bufsize=-1):
62 62 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 63 close_fds=closefds,
64 64 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 65 stderr=subprocess.PIPE)
66 66 return p.stdin, p.stdout, p.stderr
67 67 def Popen3(cmd, capturestderr=False, bufsize=-1):
68 68 stderr = capturestderr and subprocess.PIPE or None
69 69 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
70 70 close_fds=closefds,
71 71 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
72 72 stderr=stderr)
73 73 p.fromchild = p.stdout
74 74 p.tochild = p.stdin
75 75 p.childerr = p.stderr
76 76 return p
77 77 except ImportError:
78 78 subprocess = None
79 79 from popen2 import Popen3
80 80 popen2 = os.popen2
81 81 popen3 = os.popen3
82 82
83 83
84 84 _encodingfixup = {'646': 'ascii', 'ANSI_X3.4-1968': 'ascii'}
85 85
86 86 try:
87 87 _encoding = os.environ.get("HGENCODING")
88 88 if sys.platform == 'darwin' and not _encoding:
89 89 # On darwin, getpreferredencoding ignores the locale environment and
90 90 # always returns mac-roman. We override this if the environment is
91 91 # not C (has been customized by the user).
92 92 locale.setlocale(locale.LC_CTYPE, '')
93 93 _encoding = locale.getlocale()[1]
94 94 if not _encoding:
95 95 _encoding = locale.getpreferredencoding() or 'ascii'
96 96 _encoding = _encodingfixup.get(_encoding, _encoding)
97 97 except locale.Error:
98 98 _encoding = 'ascii'
99 99 _encodingmode = os.environ.get("HGENCODINGMODE", "strict")
100 100 _fallbackencoding = 'ISO-8859-1'
101 101
102 102 def tolocal(s):
103 103 """
104 104 Convert a string from internal UTF-8 to local encoding
105 105
106 106 All internal strings should be UTF-8 but some repos before the
107 107 implementation of locale support may contain latin1 or possibly
108 108 other character sets. We attempt to decode everything strictly
109 109 using UTF-8, then Latin-1, and failing that, we use UTF-8 and
110 110 replace unknown characters.
111 111 """
112 112 for e in ('UTF-8', _fallbackencoding):
113 113 try:
114 114 u = s.decode(e) # attempt strict decoding
115 115 return u.encode(_encoding, "replace")
116 116 except LookupError, k:
117 117 raise Abort(_("%s, please check your locale settings") % k)
118 118 except UnicodeDecodeError:
119 119 pass
120 120 u = s.decode("utf-8", "replace") # last ditch
121 121 return u.encode(_encoding, "replace")
122 122
123 123 def fromlocal(s):
124 124 """
125 125 Convert a string from the local character encoding to UTF-8
126 126
127 127 We attempt to decode strings using the encoding mode set by
128 128 HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown
129 129 characters will cause an error message. Other modes include
130 130 'replace', which replaces unknown characters with a special
131 131 Unicode character, and 'ignore', which drops the character.
132 132 """
133 133 try:
134 134 return s.decode(_encoding, _encodingmode).encode("utf-8")
135 135 except UnicodeDecodeError, inst:
136 136 sub = s[max(0, inst.start-10):inst.start+10]
137 137 raise Abort("decoding near '%s': %s!" % (sub, inst))
138 138 except LookupError, k:
139 139 raise Abort(_("%s, please check your locale settings") % k)
140 140
141 141 def colwidth(s):
142 142 "Find the column width of a UTF-8 string for display"
143 143 d = s.decode(_encoding, 'replace')
144 144 if hasattr(unicodedata, 'east_asian_width'):
145 145 w = unicodedata.east_asian_width
146 146 return sum([w(c) in 'WF' and 2 or 1 for c in d])
147 147 return len(d)
148 148
149 149 def version():
150 150 """Return version information if available."""
151 151 try:
152 152 import __version__
153 153 return __version__.version
154 154 except ImportError:
155 155 return 'unknown'
156 156
157 157 # used by parsedate
158 158 defaultdateformats = (
159 159 '%Y-%m-%d %H:%M:%S',
160 160 '%Y-%m-%d %I:%M:%S%p',
161 161 '%Y-%m-%d %H:%M',
162 162 '%Y-%m-%d %I:%M%p',
163 163 '%Y-%m-%d',
164 164 '%m-%d',
165 165 '%m/%d',
166 166 '%m/%d/%y',
167 167 '%m/%d/%Y',
168 168 '%a %b %d %H:%M:%S %Y',
169 169 '%a %b %d %I:%M:%S%p %Y',
170 170 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
171 171 '%b %d %H:%M:%S %Y',
172 172 '%b %d %I:%M:%S%p %Y',
173 173 '%b %d %H:%M:%S',
174 174 '%b %d %I:%M:%S%p',
175 175 '%b %d %H:%M',
176 176 '%b %d %I:%M%p',
177 177 '%b %d %Y',
178 178 '%b %d',
179 179 '%H:%M:%S',
180 180 '%I:%M:%S%p',
181 181 '%H:%M',
182 182 '%I:%M%p',
183 183 )
184 184
185 185 extendeddateformats = defaultdateformats + (
186 186 "%Y",
187 187 "%Y-%m",
188 188 "%b",
189 189 "%b %Y",
190 190 )
191 191
192 192 # differences from SafeConfigParser:
193 193 # - case-sensitive keys
194 194 # - allows values that are not strings (this means that you may not
195 195 # be able to save the configuration to a file)
196 196 class configparser(ConfigParser.SafeConfigParser):
197 197 def optionxform(self, optionstr):
198 198 return optionstr
199 199
200 200 def set(self, section, option, value):
201 201 return ConfigParser.ConfigParser.set(self, section, option, value)
202 202
203 203 def _interpolate(self, section, option, rawval, vars):
204 204 if not isinstance(rawval, basestring):
205 205 return rawval
206 206 return ConfigParser.SafeConfigParser._interpolate(self, section,
207 207 option, rawval, vars)
208 208
209 209 def cachefunc(func):
210 210 '''cache the result of function calls'''
211 211 # XXX doesn't handle keywords args
212 212 cache = {}
213 213 if func.func_code.co_argcount == 1:
214 214 # we gain a small amount of time because
215 215 # we don't need to pack/unpack the list
216 216 def f(arg):
217 217 if arg not in cache:
218 218 cache[arg] = func(arg)
219 219 return cache[arg]
220 220 else:
221 221 def f(*args):
222 222 if args not in cache:
223 223 cache[args] = func(*args)
224 224 return cache[args]
225 225
226 226 return f
227 227
228 228 def pipefilter(s, cmd):
229 229 '''filter string S through command CMD, returning its output'''
230 230 (pin, pout) = popen2(cmd, 'b')
231 231 def writer():
232 232 try:
233 233 pin.write(s)
234 234 pin.close()
235 235 except IOError, inst:
236 236 if inst.errno != errno.EPIPE:
237 237 raise
238 238
239 239 # we should use select instead on UNIX, but this will work on most
240 240 # systems, including Windows
241 241 w = threading.Thread(target=writer)
242 242 w.start()
243 243 f = pout.read()
244 244 pout.close()
245 245 w.join()
246 246 return f
247 247
248 248 def tempfilter(s, cmd):
249 249 '''filter string S through a pair of temporary files with CMD.
250 250 CMD is used as a template to create the real command to be run,
251 251 with the strings INFILE and OUTFILE replaced by the real names of
252 252 the temporary files generated.'''
253 253 inname, outname = None, None
254 254 try:
255 255 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
256 256 fp = os.fdopen(infd, 'wb')
257 257 fp.write(s)
258 258 fp.close()
259 259 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
260 260 os.close(outfd)
261 261 cmd = cmd.replace('INFILE', inname)
262 262 cmd = cmd.replace('OUTFILE', outname)
263 263 code = os.system(cmd)
264 264 if sys.platform == 'OpenVMS' and code & 1:
265 265 code = 0
266 266 if code: raise Abort(_("command '%s' failed: %s") %
267 267 (cmd, explain_exit(code)))
268 268 return open(outname, 'rb').read()
269 269 finally:
270 270 try:
271 271 if inname: os.unlink(inname)
272 272 except: pass
273 273 try:
274 274 if outname: os.unlink(outname)
275 275 except: pass
276 276
277 277 filtertable = {
278 278 'tempfile:': tempfilter,
279 279 'pipe:': pipefilter,
280 280 }
281 281
282 282 def filter(s, cmd):
283 283 "filter a string through a command that transforms its input to its output"
284 284 for name, fn in filtertable.iteritems():
285 285 if cmd.startswith(name):
286 286 return fn(s, cmd[len(name):].lstrip())
287 287 return pipefilter(s, cmd)
288 288
289 289 def binary(s):
290 290 """return true if a string is binary data"""
291 291 if s and '\0' in s:
292 292 return True
293 293 return False
294 294
295 295 def unique(g):
296 296 """return the uniq elements of iterable g"""
297 297 return dict.fromkeys(g).keys()
298 298
299 299 def sort(l):
300 300 if not isinstance(l, list):
301 301 l = list(l)
302 302 l.sort()
303 303 return l
304 304
305 305 def increasingchunks(source, min=1024, max=65536):
306 306 '''return no less than min bytes per chunk while data remains,
307 307 doubling min after each chunk until it reaches max'''
308 308 def log2(x):
309 309 if not x:
310 310 return 0
311 311 i = 0
312 312 while x:
313 313 x >>= 1
314 314 i += 1
315 315 return i - 1
316 316
317 317 buf = []
318 318 blen = 0
319 319 for chunk in source:
320 320 buf.append(chunk)
321 321 blen += len(chunk)
322 322 if blen >= min:
323 323 if min < max:
324 324 min = min << 1
325 325 nmin = 1 << log2(blen)
326 326 if nmin > min:
327 327 min = nmin
328 328 if min > max:
329 329 min = max
330 330 yield ''.join(buf)
331 331 blen = 0
332 332 buf = []
333 333 if buf:
334 334 yield ''.join(buf)
335 335
336 class Abort(Exception):
337 """Raised if a command needs to print an error and exit."""
336 Abort = error.Abort
338 337
339 338 def always(fn): return True
340 339 def never(fn): return False
341 340
342 341 def patkind(name, default):
343 342 """Split a string into an optional pattern kind prefix and the
344 343 actual pattern."""
345 344 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
346 345 if name.startswith(prefix + ':'): return name.split(':', 1)
347 346 return default, name
348 347
349 348 def globre(pat, head='^', tail='$'):
350 349 "convert a glob pattern into a regexp"
351 350 i, n = 0, len(pat)
352 351 res = ''
353 352 group = 0
354 353 def peek(): return i < n and pat[i]
355 354 while i < n:
356 355 c = pat[i]
357 356 i = i+1
358 357 if c == '*':
359 358 if peek() == '*':
360 359 i += 1
361 360 res += '.*'
362 361 else:
363 362 res += '[^/]*'
364 363 elif c == '?':
365 364 res += '.'
366 365 elif c == '[':
367 366 j = i
368 367 if j < n and pat[j] in '!]':
369 368 j += 1
370 369 while j < n and pat[j] != ']':
371 370 j += 1
372 371 if j >= n:
373 372 res += '\\['
374 373 else:
375 374 stuff = pat[i:j].replace('\\','\\\\')
376 375 i = j + 1
377 376 if stuff[0] == '!':
378 377 stuff = '^' + stuff[1:]
379 378 elif stuff[0] == '^':
380 379 stuff = '\\' + stuff
381 380 res = '%s[%s]' % (res, stuff)
382 381 elif c == '{':
383 382 group += 1
384 383 res += '(?:'
385 384 elif c == '}' and group:
386 385 res += ')'
387 386 group -= 1
388 387 elif c == ',' and group:
389 388 res += '|'
390 389 elif c == '\\':
391 390 p = peek()
392 391 if p:
393 392 i += 1
394 393 res += re.escape(p)
395 394 else:
396 395 res += re.escape(c)
397 396 else:
398 397 res += re.escape(c)
399 398 return head + res + tail
400 399
401 400 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
402 401
403 402 def pathto(root, n1, n2):
404 403 '''return the relative path from one place to another.
405 404 root should use os.sep to separate directories
406 405 n1 should use os.sep to separate directories
407 406 n2 should use "/" to separate directories
408 407 returns an os.sep-separated path.
409 408
410 409 If n1 is a relative path, it's assumed it's
411 410 relative to root.
412 411 n2 should always be relative to root.
413 412 '''
414 413 if not n1: return localpath(n2)
415 414 if os.path.isabs(n1):
416 415 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
417 416 return os.path.join(root, localpath(n2))
418 417 n2 = '/'.join((pconvert(root), n2))
419 418 a, b = splitpath(n1), n2.split('/')
420 419 a.reverse()
421 420 b.reverse()
422 421 while a and b and a[-1] == b[-1]:
423 422 a.pop()
424 423 b.pop()
425 424 b.reverse()
426 425 return os.sep.join((['..'] * len(a)) + b) or '.'
427 426
428 427 def canonpath(root, cwd, myname):
429 428 """return the canonical path of myname, given cwd and root"""
430 429 if root == os.sep:
431 430 rootsep = os.sep
432 431 elif endswithsep(root):
433 432 rootsep = root
434 433 else:
435 434 rootsep = root + os.sep
436 435 name = myname
437 436 if not os.path.isabs(name):
438 437 name = os.path.join(root, cwd, name)
439 438 name = os.path.normpath(name)
440 439 audit_path = path_auditor(root)
441 440 if name != rootsep and name.startswith(rootsep):
442 441 name = name[len(rootsep):]
443 442 audit_path(name)
444 443 return pconvert(name)
445 444 elif name == root:
446 445 return ''
447 446 else:
448 447 # Determine whether `name' is in the hierarchy at or beneath `root',
449 448 # by iterating name=dirname(name) until that causes no change (can't
450 449 # check name == '/', because that doesn't work on windows). For each
451 450 # `name', compare dev/inode numbers. If they match, the list `rel'
452 451 # holds the reversed list of components making up the relative file
453 452 # name we want.
454 453 root_st = os.stat(root)
455 454 rel = []
456 455 while True:
457 456 try:
458 457 name_st = os.stat(name)
459 458 except OSError:
460 459 break
461 460 if samestat(name_st, root_st):
462 461 if not rel:
463 462 # name was actually the same as root (maybe a symlink)
464 463 return ''
465 464 rel.reverse()
466 465 name = os.path.join(*rel)
467 466 audit_path(name)
468 467 return pconvert(name)
469 468 dirname, basename = os.path.split(name)
470 469 rel.append(basename)
471 470 if dirname == name:
472 471 break
473 472 name = dirname
474 473
475 474 raise Abort('%s not under root' % myname)
476 475
477 476 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
478 477 """build a function to match a set of file patterns
479 478
480 479 arguments:
481 480 canonroot - the canonical root of the tree you're matching against
482 481 cwd - the current working directory, if relevant
483 482 names - patterns to find
484 483 inc - patterns to include
485 484 exc - patterns to exclude
486 485 dflt_pat - if a pattern in names has no explicit type, assume this one
487 486 src - where these patterns came from (e.g. .hgignore)
488 487
489 488 a pattern is one of:
490 489 'glob:<glob>' - a glob relative to cwd
491 490 're:<regexp>' - a regular expression
492 491 'path:<path>' - a path relative to canonroot
493 492 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
494 493 'relpath:<path>' - a path relative to cwd
495 494 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
496 495 '<something>' - one of the cases above, selected by the dflt_pat argument
497 496
498 497 returns:
499 498 a 3-tuple containing
500 499 - list of roots (places where one should start a recursive walk of the fs);
501 500 this often matches the explicit non-pattern names passed in, but also
502 501 includes the initial part of glob: patterns that has no glob characters
503 502 - a bool match(filename) function
504 503 - a bool indicating if any patterns were passed in
505 504 """
506 505
507 506 # a common case: no patterns at all
508 507 if not names and not inc and not exc:
509 508 return [], always, False
510 509
511 510 def contains_glob(name):
512 511 for c in name:
513 512 if c in _globchars: return True
514 513 return False
515 514
516 515 def regex(kind, name, tail):
517 516 '''convert a pattern into a regular expression'''
518 517 if not name:
519 518 return ''
520 519 if kind == 're':
521 520 return name
522 521 elif kind == 'path':
523 522 return '^' + re.escape(name) + '(?:/|$)'
524 523 elif kind == 'relglob':
525 524 return globre(name, '(?:|.*/)', tail)
526 525 elif kind == 'relpath':
527 526 return re.escape(name) + '(?:/|$)'
528 527 elif kind == 'relre':
529 528 if name.startswith('^'):
530 529 return name
531 530 return '.*' + name
532 531 return globre(name, '', tail)
533 532
534 533 def matchfn(pats, tail):
535 534 """build a matching function from a set of patterns"""
536 535 if not pats:
537 536 return
538 537 try:
539 538 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
540 539 if len(pat) > 20000:
541 540 raise OverflowError()
542 541 return re.compile(pat).match
543 542 except OverflowError:
544 543 # We're using a Python with a tiny regex engine and we
545 544 # made it explode, so we'll divide the pattern list in two
546 545 # until it works
547 546 l = len(pats)
548 547 if l < 2:
549 548 raise
550 549 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
551 550 return lambda s: a(s) or b(s)
552 551 except re.error:
553 552 for k, p in pats:
554 553 try:
555 554 re.compile('(?:%s)' % regex(k, p, tail))
556 555 except re.error:
557 556 if src:
558 557 raise Abort("%s: invalid pattern (%s): %s" %
559 558 (src, k, p))
560 559 else:
561 560 raise Abort("invalid pattern (%s): %s" % (k, p))
562 561 raise Abort("invalid pattern")
563 562
564 563 def globprefix(pat):
565 564 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
566 565 root = []
567 566 for p in pat.split('/'):
568 567 if contains_glob(p): break
569 568 root.append(p)
570 569 return '/'.join(root) or '.'
571 570
572 571 def normalizepats(names, default):
573 572 pats = []
574 573 roots = []
575 574 anypats = False
576 575 for kind, name in [patkind(p, default) for p in names]:
577 576 if kind in ('glob', 'relpath'):
578 577 name = canonpath(canonroot, cwd, name)
579 578 elif kind in ('relglob', 'path'):
580 579 name = normpath(name)
581 580
582 581 pats.append((kind, name))
583 582
584 583 if kind in ('glob', 're', 'relglob', 'relre'):
585 584 anypats = True
586 585
587 586 if kind == 'glob':
588 587 root = globprefix(name)
589 588 roots.append(root)
590 589 elif kind in ('relpath', 'path'):
591 590 roots.append(name or '.')
592 591 elif kind == 'relglob':
593 592 roots.append('.')
594 593 return roots, pats, anypats
595 594
596 595 roots, pats, anypats = normalizepats(names, dflt_pat)
597 596
598 597 patmatch = matchfn(pats, '$') or always
599 598 incmatch = always
600 599 if inc:
601 600 dummy, inckinds, dummy = normalizepats(inc, 'glob')
602 601 incmatch = matchfn(inckinds, '(?:/|$)')
603 602 excmatch = never
604 603 if exc:
605 604 dummy, exckinds, dummy = normalizepats(exc, 'glob')
606 605 excmatch = matchfn(exckinds, '(?:/|$)')
607 606
608 607 if not names and inc and not exc:
609 608 # common case: hgignore patterns
610 609 match = incmatch
611 610 else:
612 611 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
613 612
614 613 return (roots, match, (inc or exc or anypats) and True)
615 614
616 615 _hgexecutable = None
617 616
618 617 def main_is_frozen():
619 618 """return True if we are a frozen executable.
620 619
621 620 The code supports py2exe (most common, Windows only) and tools/freeze
622 621 (portable, not much used).
623 622 """
624 623 return (hasattr(sys, "frozen") or # new py2exe
625 624 hasattr(sys, "importers") or # old py2exe
626 625 imp.is_frozen("__main__")) # tools/freeze
627 626
628 627 def hgexecutable():
629 628 """return location of the 'hg' executable.
630 629
631 630 Defaults to $HG or 'hg' in the search path.
632 631 """
633 632 if _hgexecutable is None:
634 633 hg = os.environ.get('HG')
635 634 if hg:
636 635 set_hgexecutable(hg)
637 636 elif main_is_frozen():
638 637 set_hgexecutable(sys.executable)
639 638 else:
640 639 set_hgexecutable(find_exe('hg') or 'hg')
641 640 return _hgexecutable
642 641
643 642 def set_hgexecutable(path):
644 643 """set location of the 'hg' executable"""
645 644 global _hgexecutable
646 645 _hgexecutable = path
647 646
648 647 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
649 648 '''enhanced shell command execution.
650 649 run with environment maybe modified, maybe in different dir.
651 650
652 651 if command fails and onerr is None, return status. if ui object,
653 652 print error message and return status, else raise onerr object as
654 653 exception.'''
655 654 def py2shell(val):
656 655 'convert python object into string that is useful to shell'
657 656 if val in (None, False):
658 657 return '0'
659 658 if val == True:
660 659 return '1'
661 660 return str(val)
662 661 oldenv = {}
663 662 for k in environ:
664 663 oldenv[k] = os.environ.get(k)
665 664 if cwd is not None:
666 665 oldcwd = os.getcwd()
667 666 origcmd = cmd
668 667 if os.name == 'nt':
669 668 cmd = '"%s"' % cmd
670 669 try:
671 670 for k, v in environ.iteritems():
672 671 os.environ[k] = py2shell(v)
673 672 os.environ['HG'] = hgexecutable()
674 673 if cwd is not None and oldcwd != cwd:
675 674 os.chdir(cwd)
676 675 rc = os.system(cmd)
677 676 if sys.platform == 'OpenVMS' and rc & 1:
678 677 rc = 0
679 678 if rc and onerr:
680 679 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
681 680 explain_exit(rc)[0])
682 681 if errprefix:
683 682 errmsg = '%s: %s' % (errprefix, errmsg)
684 683 try:
685 684 onerr.warn(errmsg + '\n')
686 685 except AttributeError:
687 686 raise onerr(errmsg)
688 687 return rc
689 688 finally:
690 689 for k, v in oldenv.iteritems():
691 690 if v is None:
692 691 del os.environ[k]
693 692 else:
694 693 os.environ[k] = v
695 694 if cwd is not None and oldcwd != cwd:
696 695 os.chdir(oldcwd)
697 696
698 697 def checksignature(func):
699 698 '''wrap a function with code to check for calling errors'''
700 699 def check(*args, **kwargs):
701 700 try:
702 701 return func(*args, **kwargs)
703 702 except TypeError:
704 703 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
705 704 raise error.SignatureError
706 705 raise
707 706
708 707 return check
709 708
710 709 # os.path.lexists is not available on python2.3
711 710 def lexists(filename):
712 711 "test whether a file with this name exists. does not follow symlinks"
713 712 try:
714 713 os.lstat(filename)
715 714 except:
716 715 return False
717 716 return True
718 717
719 718 def rename(src, dst):
720 719 """forcibly rename a file"""
721 720 try:
722 721 os.rename(src, dst)
723 722 except OSError, err: # FIXME: check err (EEXIST ?)
724 723 # on windows, rename to existing file is not allowed, so we
725 724 # must delete destination first. but if file is open, unlink
726 725 # schedules it for delete but does not delete it. rename
727 726 # happens immediately even for open files, so we rename
728 727 # destination to a temporary name, then delete that. then
729 728 # rename is safe to do.
730 729 temp = dst + "-force-rename"
731 730 os.rename(dst, temp)
732 731 os.unlink(temp)
733 732 os.rename(src, dst)
734 733
735 734 def unlink(f):
736 735 """unlink and remove the directory if it is empty"""
737 736 os.unlink(f)
738 737 # try removing directories that might now be empty
739 738 try:
740 739 os.removedirs(os.path.dirname(f))
741 740 except OSError:
742 741 pass
743 742
744 743 def copyfile(src, dest):
745 744 "copy a file, preserving mode and atime/mtime"
746 745 if os.path.islink(src):
747 746 try:
748 747 os.unlink(dest)
749 748 except:
750 749 pass
751 750 os.symlink(os.readlink(src), dest)
752 751 else:
753 752 try:
754 753 shutil.copyfile(src, dest)
755 754 shutil.copystat(src, dest)
756 755 except shutil.Error, inst:
757 756 raise Abort(str(inst))
758 757
759 758 def copyfiles(src, dst, hardlink=None):
760 759 """Copy a directory tree using hardlinks if possible"""
761 760
762 761 if hardlink is None:
763 762 hardlink = (os.stat(src).st_dev ==
764 763 os.stat(os.path.dirname(dst)).st_dev)
765 764
766 765 if os.path.isdir(src):
767 766 os.mkdir(dst)
768 767 for name, kind in osutil.listdir(src):
769 768 srcname = os.path.join(src, name)
770 769 dstname = os.path.join(dst, name)
771 770 copyfiles(srcname, dstname, hardlink)
772 771 else:
773 772 if hardlink:
774 773 try:
775 774 os_link(src, dst)
776 775 except (IOError, OSError):
777 776 hardlink = False
778 777 shutil.copy(src, dst)
779 778 else:
780 779 shutil.copy(src, dst)
781 780
782 781 class path_auditor(object):
783 782 '''ensure that a filesystem path contains no banned components.
784 783 the following properties of a path are checked:
785 784
786 785 - under top-level .hg
787 786 - starts at the root of a windows drive
788 787 - contains ".."
789 788 - traverses a symlink (e.g. a/symlink_here/b)
790 789 - inside a nested repository'''
791 790
792 791 def __init__(self, root):
793 792 self.audited = set()
794 793 self.auditeddir = set()
795 794 self.root = root
796 795
797 796 def __call__(self, path):
798 797 if path in self.audited:
799 798 return
800 799 normpath = os.path.normcase(path)
801 800 parts = splitpath(normpath)
802 801 if (os.path.splitdrive(path)[0]
803 802 or parts[0].lower() in ('.hg', '.hg.', '')
804 803 or os.pardir in parts):
805 804 raise Abort(_("path contains illegal component: %s") % path)
806 805 if '.hg' in path.lower():
807 806 lparts = [p.lower() for p in parts]
808 807 for p in '.hg', '.hg.':
809 808 if p in lparts[1:]:
810 809 pos = lparts.index(p)
811 810 base = os.path.join(*parts[:pos])
812 811 raise Abort(_('path %r is inside repo %r') % (path, base))
813 812 def check(prefix):
814 813 curpath = os.path.join(self.root, prefix)
815 814 try:
816 815 st = os.lstat(curpath)
817 816 except OSError, err:
818 817 # EINVAL can be raised as invalid path syntax under win32.
819 818 # They must be ignored so that patterns can still be checked.
820 819 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
821 820 raise
822 821 else:
823 822 if stat.S_ISLNK(st.st_mode):
824 823 raise Abort(_('path %r traverses symbolic link %r') %
825 824 (path, prefix))
826 825 elif (stat.S_ISDIR(st.st_mode) and
827 826 os.path.isdir(os.path.join(curpath, '.hg'))):
828 827 raise Abort(_('path %r is inside repo %r') %
829 828 (path, prefix))
830 829 parts.pop()
831 830 prefixes = []
832 831 for n in range(len(parts)):
833 832 prefix = os.sep.join(parts)
834 833 if prefix in self.auditeddir:
835 834 break
836 835 check(prefix)
837 836 prefixes.append(prefix)
838 837 parts.pop()
839 838
840 839 self.audited.add(path)
841 840 # only add prefixes to the cache after checking everything: we don't
842 841 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
843 842 self.auditeddir.update(prefixes)
844 843
845 844 if os.name == 'nt':
846 845 from windows import *
847 846 def expand_glob(pats):
848 847 '''On Windows, expand the implicit globs in a list of patterns'''
849 848 ret = []
850 849 for p in pats:
851 850 kind, name = patkind(p, None)
852 851 if kind is None:
853 852 globbed = glob.glob(name)
854 853 if globbed:
855 854 ret.extend(globbed)
856 855 continue
857 856 # if we couldn't expand the glob, just keep it around
858 857 ret.append(p)
859 858 return ret
860 859 else:
861 860 from posix import *
862 861
863 862 def makelock(info, pathname):
864 863 try:
865 864 return os.symlink(info, pathname)
866 865 except OSError, why:
867 866 if why.errno == errno.EEXIST:
868 867 raise
869 868 except AttributeError: # no symlink in os
870 869 pass
871 870
872 871 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
873 872 os.write(ld, info)
874 873 os.close(ld)
875 874
876 875 def readlock(pathname):
877 876 try:
878 877 return os.readlink(pathname)
879 878 except OSError, why:
880 879 if why.errno not in (errno.EINVAL, errno.ENOSYS):
881 880 raise
882 881 except AttributeError: # no symlink in os
883 882 pass
884 883 return posixfile(pathname).read()
885 884
886 885 def nlinks(pathname):
887 886 """Return number of hardlinks for the given file."""
888 887 return os.lstat(pathname).st_nlink
889 888
890 889 if hasattr(os, 'link'):
891 890 os_link = os.link
892 891 else:
893 892 def os_link(src, dst):
894 893 raise OSError(0, _("Hardlinks not supported"))
895 894
896 895 def fstat(fp):
897 896 '''stat file object that may not have fileno method.'''
898 897 try:
899 898 return os.fstat(fp.fileno())
900 899 except AttributeError:
901 900 return os.stat(fp.name)
902 901
903 902 # File system features
904 903
905 904 def checkcase(path):
906 905 """
907 906 Check whether the given path is on a case-sensitive filesystem
908 907
909 908 Requires a path (like /foo/.hg) ending with a foldable final
910 909 directory component.
911 910 """
912 911 s1 = os.stat(path)
913 912 d, b = os.path.split(path)
914 913 p2 = os.path.join(d, b.upper())
915 914 if path == p2:
916 915 p2 = os.path.join(d, b.lower())
917 916 try:
918 917 s2 = os.stat(p2)
919 918 if s2 == s1:
920 919 return False
921 920 return True
922 921 except:
923 922 return True
924 923
925 924 _fspathcache = {}
926 925 def fspath(name, root):
927 926 '''Get name in the case stored in the filesystem
928 927
929 928 The name is either relative to root, or it is an absolute path starting
930 929 with root. Note that this function is unnecessary, and should not be
931 930 called, for case-sensitive filesystems (simply because it's expensive).
932 931 '''
933 932 # If name is absolute, make it relative
934 933 if name.lower().startswith(root.lower()):
935 934 l = len(root)
936 935 if name[l] == os.sep or name[l] == os.altsep:
937 936 l = l + 1
938 937 name = name[l:]
939 938
940 939 if not os.path.exists(os.path.join(root, name)):
941 940 return None
942 941
943 942 seps = os.sep
944 943 if os.altsep:
945 944 seps = seps + os.altsep
946 945 # Protect backslashes. This gets silly very quickly.
947 946 seps.replace('\\','\\\\')
948 947 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
949 948 dir = os.path.normcase(os.path.normpath(root))
950 949 result = []
951 950 for part, sep in pattern.findall(name):
952 951 if sep:
953 952 result.append(sep)
954 953 continue
955 954
956 955 if dir not in _fspathcache:
957 956 _fspathcache[dir] = os.listdir(dir)
958 957 contents = _fspathcache[dir]
959 958
960 959 lpart = part.lower()
961 960 for n in contents:
962 961 if n.lower() == lpart:
963 962 result.append(n)
964 963 break
965 964 else:
966 965 # Cannot happen, as the file exists!
967 966 result.append(part)
968 967 dir = os.path.join(dir, lpart)
969 968
970 969 return ''.join(result)
971 970
972 971 def checkexec(path):
973 972 """
974 973 Check whether the given path is on a filesystem with UNIX-like exec flags
975 974
976 975 Requires a directory (like /foo/.hg)
977 976 """
978 977
979 978 # VFAT on some Linux versions can flip mode but it doesn't persist
980 979 # a FS remount. Frequently we can detect it if files are created
981 980 # with exec bit on.
982 981
983 982 try:
984 983 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
985 984 fh, fn = tempfile.mkstemp("", "", path)
986 985 try:
987 986 os.close(fh)
988 987 m = os.stat(fn).st_mode & 0777
989 988 new_file_has_exec = m & EXECFLAGS
990 989 os.chmod(fn, m ^ EXECFLAGS)
991 990 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
992 991 finally:
993 992 os.unlink(fn)
994 993 except (IOError, OSError):
995 994 # we don't care, the user probably won't be able to commit anyway
996 995 return False
997 996 return not (new_file_has_exec or exec_flags_cannot_flip)
998 997
999 998 def checklink(path):
1000 999 """check whether the given path is on a symlink-capable filesystem"""
1001 1000 # mktemp is not racy because symlink creation will fail if the
1002 1001 # file already exists
1003 1002 name = tempfile.mktemp(dir=path)
1004 1003 try:
1005 1004 os.symlink(".", name)
1006 1005 os.unlink(name)
1007 1006 return True
1008 1007 except (OSError, AttributeError):
1009 1008 return False
1010 1009
1011 1010 def needbinarypatch():
1012 1011 """return True if patches should be applied in binary mode by default."""
1013 1012 return os.name == 'nt'
1014 1013
1015 1014 def endswithsep(path):
1016 1015 '''Check path ends with os.sep or os.altsep.'''
1017 1016 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
1018 1017
1019 1018 def splitpath(path):
1020 1019 '''Split path by os.sep.
1021 1020 Note that this function does not use os.altsep because this is
1022 1021 an alternative of simple "xxx.split(os.sep)".
1023 1022 It is recommended to use os.path.normpath() before using this
1024 1023 function if needed.'''
1025 1024 return path.split(os.sep)
1026 1025
1027 1026 def gui():
1028 1027 '''Are we running in a GUI?'''
1029 1028 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
1030 1029
1031 1030 def lookup_reg(key, name=None, scope=None):
1032 1031 return None
1033 1032
1034 1033 def mktempcopy(name, emptyok=False, createmode=None):
1035 1034 """Create a temporary file with the same contents from name
1036 1035
1037 1036 The permission bits are copied from the original file.
1038 1037
1039 1038 If the temporary file is going to be truncated immediately, you
1040 1039 can use emptyok=True as an optimization.
1041 1040
1042 1041 Returns the name of the temporary file.
1043 1042 """
1044 1043 d, fn = os.path.split(name)
1045 1044 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
1046 1045 os.close(fd)
1047 1046 # Temporary files are created with mode 0600, which is usually not
1048 1047 # what we want. If the original file already exists, just copy
1049 1048 # its mode. Otherwise, manually obey umask.
1050 1049 try:
1051 1050 st_mode = os.lstat(name).st_mode & 0777
1052 1051 except OSError, inst:
1053 1052 if inst.errno != errno.ENOENT:
1054 1053 raise
1055 1054 st_mode = createmode
1056 1055 if st_mode is None:
1057 1056 st_mode = ~umask
1058 1057 st_mode &= 0666
1059 1058 os.chmod(temp, st_mode)
1060 1059 if emptyok:
1061 1060 return temp
1062 1061 try:
1063 1062 try:
1064 1063 ifp = posixfile(name, "rb")
1065 1064 except IOError, inst:
1066 1065 if inst.errno == errno.ENOENT:
1067 1066 return temp
1068 1067 if not getattr(inst, 'filename', None):
1069 1068 inst.filename = name
1070 1069 raise
1071 1070 ofp = posixfile(temp, "wb")
1072 1071 for chunk in filechunkiter(ifp):
1073 1072 ofp.write(chunk)
1074 1073 ifp.close()
1075 1074 ofp.close()
1076 1075 except:
1077 1076 try: os.unlink(temp)
1078 1077 except: pass
1079 1078 raise
1080 1079 return temp
1081 1080
1082 1081 class atomictempfile(posixfile):
1083 1082 """file-like object that atomically updates a file
1084 1083
1085 1084 All writes will be redirected to a temporary copy of the original
1086 1085 file. When rename is called, the copy is renamed to the original
1087 1086 name, making the changes visible.
1088 1087 """
1089 1088 def __init__(self, name, mode, createmode):
1090 1089 self.__name = name
1091 1090 self.temp = mktempcopy(name, emptyok=('w' in mode),
1092 1091 createmode=createmode)
1093 1092 posixfile.__init__(self, self.temp, mode)
1094 1093
1095 1094 def rename(self):
1096 1095 if not self.closed:
1097 1096 posixfile.close(self)
1098 1097 rename(self.temp, localpath(self.__name))
1099 1098
1100 1099 def __del__(self):
1101 1100 if not self.closed:
1102 1101 try:
1103 1102 os.unlink(self.temp)
1104 1103 except: pass
1105 1104 posixfile.close(self)
1106 1105
1107 1106 def makedirs(name, mode=None):
1108 1107 """recursive directory creation with parent mode inheritance"""
1109 1108 try:
1110 1109 os.mkdir(name)
1111 1110 if mode is not None:
1112 1111 os.chmod(name, mode)
1113 1112 return
1114 1113 except OSError, err:
1115 1114 if err.errno == errno.EEXIST:
1116 1115 return
1117 1116 if err.errno != errno.ENOENT:
1118 1117 raise
1119 1118 parent = os.path.abspath(os.path.dirname(name))
1120 1119 makedirs(parent, mode)
1121 1120 makedirs(name, mode)
1122 1121
1123 1122 class opener(object):
1124 1123 """Open files relative to a base directory
1125 1124
1126 1125 This class is used to hide the details of COW semantics and
1127 1126 remote file access from higher level code.
1128 1127 """
1129 1128 def __init__(self, base, audit=True):
1130 1129 self.base = base
1131 1130 if audit:
1132 1131 self.audit_path = path_auditor(base)
1133 1132 else:
1134 1133 self.audit_path = always
1135 1134 self.createmode = None
1136 1135
1137 1136 def __getattr__(self, name):
1138 1137 if name == '_can_symlink':
1139 1138 self._can_symlink = checklink(self.base)
1140 1139 return self._can_symlink
1141 1140 raise AttributeError(name)
1142 1141
1143 1142 def _fixfilemode(self, name):
1144 1143 if self.createmode is None:
1145 1144 return
1146 1145 os.chmod(name, self.createmode & 0666)
1147 1146
1148 1147 def __call__(self, path, mode="r", text=False, atomictemp=False):
1149 1148 self.audit_path(path)
1150 1149 f = os.path.join(self.base, path)
1151 1150
1152 1151 if not text and "b" not in mode:
1153 1152 mode += "b" # for that other OS
1154 1153
1155 1154 nlink = -1
1156 1155 if mode not in ("r", "rb"):
1157 1156 try:
1158 1157 nlink = nlinks(f)
1159 1158 except OSError:
1160 1159 nlink = 0
1161 1160 d = os.path.dirname(f)
1162 1161 if not os.path.isdir(d):
1163 1162 makedirs(d, self.createmode)
1164 1163 if atomictemp:
1165 1164 return atomictempfile(f, mode, self.createmode)
1166 1165 if nlink > 1:
1167 1166 rename(mktempcopy(f), f)
1168 1167 fp = posixfile(f, mode)
1169 1168 if nlink == 0:
1170 1169 self._fixfilemode(f)
1171 1170 return fp
1172 1171
1173 1172 def symlink(self, src, dst):
1174 1173 self.audit_path(dst)
1175 1174 linkname = os.path.join(self.base, dst)
1176 1175 try:
1177 1176 os.unlink(linkname)
1178 1177 except OSError:
1179 1178 pass
1180 1179
1181 1180 dirname = os.path.dirname(linkname)
1182 1181 if not os.path.exists(dirname):
1183 1182 makedirs(dirname, self.createmode)
1184 1183
1185 1184 if self._can_symlink:
1186 1185 try:
1187 1186 os.symlink(src, linkname)
1188 1187 except OSError, err:
1189 1188 raise OSError(err.errno, _('could not symlink to %r: %s') %
1190 1189 (src, err.strerror), linkname)
1191 1190 else:
1192 1191 f = self(dst, "w")
1193 1192 f.write(src)
1194 1193 f.close()
1195 1194 self._fixfilemode(dst)
1196 1195
1197 1196 class chunkbuffer(object):
1198 1197 """Allow arbitrary sized chunks of data to be efficiently read from an
1199 1198 iterator over chunks of arbitrary size."""
1200 1199
1201 1200 def __init__(self, in_iter):
1202 1201 """in_iter is the iterator that's iterating over the input chunks.
1203 1202 targetsize is how big a buffer to try to maintain."""
1204 1203 self.iter = iter(in_iter)
1205 1204 self.buf = ''
1206 1205 self.targetsize = 2**16
1207 1206
1208 1207 def read(self, l):
1209 1208 """Read L bytes of data from the iterator of chunks of data.
1210 1209 Returns less than L bytes if the iterator runs dry."""
1211 1210 if l > len(self.buf) and self.iter:
1212 1211 # Clamp to a multiple of self.targetsize
1213 1212 targetsize = max(l, self.targetsize)
1214 1213 collector = cStringIO.StringIO()
1215 1214 collector.write(self.buf)
1216 1215 collected = len(self.buf)
1217 1216 for chunk in self.iter:
1218 1217 collector.write(chunk)
1219 1218 collected += len(chunk)
1220 1219 if collected >= targetsize:
1221 1220 break
1222 1221 if collected < targetsize:
1223 1222 self.iter = False
1224 1223 self.buf = collector.getvalue()
1225 1224 if len(self.buf) == l:
1226 1225 s, self.buf = str(self.buf), ''
1227 1226 else:
1228 1227 s, self.buf = self.buf[:l], buffer(self.buf, l)
1229 1228 return s
1230 1229
1231 1230 def filechunkiter(f, size=65536, limit=None):
1232 1231 """Create a generator that produces the data in the file size
1233 1232 (default 65536) bytes at a time, up to optional limit (default is
1234 1233 to read all data). Chunks may be less than size bytes if the
1235 1234 chunk is the last chunk in the file, or the file is a socket or
1236 1235 some other type of file that sometimes reads less data than is
1237 1236 requested."""
1238 1237 assert size >= 0
1239 1238 assert limit is None or limit >= 0
1240 1239 while True:
1241 1240 if limit is None: nbytes = size
1242 1241 else: nbytes = min(limit, size)
1243 1242 s = nbytes and f.read(nbytes)
1244 1243 if not s: break
1245 1244 if limit: limit -= len(s)
1246 1245 yield s
1247 1246
1248 1247 def makedate():
1249 1248 lt = time.localtime()
1250 1249 if lt[8] == 1 and time.daylight:
1251 1250 tz = time.altzone
1252 1251 else:
1253 1252 tz = time.timezone
1254 1253 return time.mktime(lt), tz
1255 1254
1256 1255 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1257 1256 """represent a (unixtime, offset) tuple as a localized time.
1258 1257 unixtime is seconds since the epoch, and offset is the time zone's
1259 1258 number of seconds away from UTC. if timezone is false, do not
1260 1259 append time zone to string."""
1261 1260 t, tz = date or makedate()
1262 1261 if "%1" in format or "%2" in format:
1263 1262 sign = (tz > 0) and "-" or "+"
1264 1263 minutes = abs(tz) / 60
1265 1264 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1266 1265 format = format.replace("%2", "%02d" % (minutes % 60))
1267 1266 s = time.strftime(format, time.gmtime(float(t) - tz))
1268 1267 return s
1269 1268
1270 1269 def shortdate(date=None):
1271 1270 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1272 1271 return datestr(date, format='%Y-%m-%d')
1273 1272
1274 1273 def strdate(string, format, defaults=[]):
1275 1274 """parse a localized time string and return a (unixtime, offset) tuple.
1276 1275 if the string cannot be parsed, ValueError is raised."""
1277 1276 def timezone(string):
1278 1277 tz = string.split()[-1]
1279 1278 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1280 1279 sign = (tz[0] == "+") and 1 or -1
1281 1280 hours = int(tz[1:3])
1282 1281 minutes = int(tz[3:5])
1283 1282 return -sign * (hours * 60 + minutes) * 60
1284 1283 if tz == "GMT" or tz == "UTC":
1285 1284 return 0
1286 1285 return None
1287 1286
1288 1287 # NOTE: unixtime = localunixtime + offset
1289 1288 offset, date = timezone(string), string
1290 1289 if offset != None:
1291 1290 date = " ".join(string.split()[:-1])
1292 1291
1293 1292 # add missing elements from defaults
1294 1293 for part in defaults:
1295 1294 found = [True for p in part if ("%"+p) in format]
1296 1295 if not found:
1297 1296 date += "@" + defaults[part]
1298 1297 format += "@%" + part[0]
1299 1298
1300 1299 timetuple = time.strptime(date, format)
1301 1300 localunixtime = int(calendar.timegm(timetuple))
1302 1301 if offset is None:
1303 1302 # local timezone
1304 1303 unixtime = int(time.mktime(timetuple))
1305 1304 offset = unixtime - localunixtime
1306 1305 else:
1307 1306 unixtime = localunixtime + offset
1308 1307 return unixtime, offset
1309 1308
1310 1309 def parsedate(date, formats=None, defaults=None):
1311 1310 """parse a localized date/time string and return a (unixtime, offset) tuple.
1312 1311
1313 1312 The date may be a "unixtime offset" string or in one of the specified
1314 1313 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1315 1314 """
1316 1315 if not date:
1317 1316 return 0, 0
1318 1317 if isinstance(date, tuple) and len(date) == 2:
1319 1318 return date
1320 1319 if not formats:
1321 1320 formats = defaultdateformats
1322 1321 date = date.strip()
1323 1322 try:
1324 1323 when, offset = map(int, date.split(' '))
1325 1324 except ValueError:
1326 1325 # fill out defaults
1327 1326 if not defaults:
1328 1327 defaults = {}
1329 1328 now = makedate()
1330 1329 for part in "d mb yY HI M S".split():
1331 1330 if part not in defaults:
1332 1331 if part[0] in "HMS":
1333 1332 defaults[part] = "00"
1334 1333 else:
1335 1334 defaults[part] = datestr(now, "%" + part[0])
1336 1335
1337 1336 for format in formats:
1338 1337 try:
1339 1338 when, offset = strdate(date, format, defaults)
1340 1339 except (ValueError, OverflowError):
1341 1340 pass
1342 1341 else:
1343 1342 break
1344 1343 else:
1345 1344 raise Abort(_('invalid date: %r ') % date)
1346 1345 # validate explicit (probably user-specified) date and
1347 1346 # time zone offset. values must fit in signed 32 bits for
1348 1347 # current 32-bit linux runtimes. timezones go from UTC-12
1349 1348 # to UTC+14
1350 1349 if abs(when) > 0x7fffffff:
1351 1350 raise Abort(_('date exceeds 32 bits: %d') % when)
1352 1351 if offset < -50400 or offset > 43200:
1353 1352 raise Abort(_('impossible time zone offset: %d') % offset)
1354 1353 return when, offset
1355 1354
1356 1355 def matchdate(date):
1357 1356 """Return a function that matches a given date match specifier
1358 1357
1359 1358 Formats include:
1360 1359
1361 1360 '{date}' match a given date to the accuracy provided
1362 1361
1363 1362 '<{date}' on or before a given date
1364 1363
1365 1364 '>{date}' on or after a given date
1366 1365
1367 1366 """
1368 1367
1369 1368 def lower(date):
1370 1369 d = dict(mb="1", d="1")
1371 1370 return parsedate(date, extendeddateformats, d)[0]
1372 1371
1373 1372 def upper(date):
1374 1373 d = dict(mb="12", HI="23", M="59", S="59")
1375 1374 for days in "31 30 29".split():
1376 1375 try:
1377 1376 d["d"] = days
1378 1377 return parsedate(date, extendeddateformats, d)[0]
1379 1378 except:
1380 1379 pass
1381 1380 d["d"] = "28"
1382 1381 return parsedate(date, extendeddateformats, d)[0]
1383 1382
1384 1383 if date[0] == "<":
1385 1384 when = upper(date[1:])
1386 1385 return lambda x: x <= when
1387 1386 elif date[0] == ">":
1388 1387 when = lower(date[1:])
1389 1388 return lambda x: x >= when
1390 1389 elif date[0] == "-":
1391 1390 try:
1392 1391 days = int(date[1:])
1393 1392 except ValueError:
1394 1393 raise Abort(_("invalid day spec: %s") % date[1:])
1395 1394 when = makedate()[0] - days * 3600 * 24
1396 1395 return lambda x: x >= when
1397 1396 elif " to " in date:
1398 1397 a, b = date.split(" to ")
1399 1398 start, stop = lower(a), upper(b)
1400 1399 return lambda x: x >= start and x <= stop
1401 1400 else:
1402 1401 start, stop = lower(date), upper(date)
1403 1402 return lambda x: x >= start and x <= stop
1404 1403
1405 1404 def shortuser(user):
1406 1405 """Return a short representation of a user name or email address."""
1407 1406 f = user.find('@')
1408 1407 if f >= 0:
1409 1408 user = user[:f]
1410 1409 f = user.find('<')
1411 1410 if f >= 0:
1412 1411 user = user[f+1:]
1413 1412 f = user.find(' ')
1414 1413 if f >= 0:
1415 1414 user = user[:f]
1416 1415 f = user.find('.')
1417 1416 if f >= 0:
1418 1417 user = user[:f]
1419 1418 return user
1420 1419
1421 1420 def email(author):
1422 1421 '''get email of author.'''
1423 1422 r = author.find('>')
1424 1423 if r == -1: r = None
1425 1424 return author[author.find('<')+1:r]
1426 1425
1427 1426 def ellipsis(text, maxlength=400):
1428 1427 """Trim string to at most maxlength (default: 400) characters."""
1429 1428 if len(text) <= maxlength:
1430 1429 return text
1431 1430 else:
1432 1431 return "%s..." % (text[:maxlength-3])
1433 1432
1434 1433 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1435 1434 '''yield every hg repository under path, recursively.'''
1436 1435 def errhandler(err):
1437 1436 if err.filename == path:
1438 1437 raise err
1439 1438 if followsym and hasattr(os.path, 'samestat'):
1440 1439 def _add_dir_if_not_there(dirlst, dirname):
1441 1440 match = False
1442 1441 samestat = os.path.samestat
1443 1442 dirstat = os.stat(dirname)
1444 1443 for lstdirstat in dirlst:
1445 1444 if samestat(dirstat, lstdirstat):
1446 1445 match = True
1447 1446 break
1448 1447 if not match:
1449 1448 dirlst.append(dirstat)
1450 1449 return not match
1451 1450 else:
1452 1451 followsym = False
1453 1452
1454 1453 if (seen_dirs is None) and followsym:
1455 1454 seen_dirs = []
1456 1455 _add_dir_if_not_there(seen_dirs, path)
1457 1456 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1458 1457 if '.hg' in dirs:
1459 1458 yield root # found a repository
1460 1459 qroot = os.path.join(root, '.hg', 'patches')
1461 1460 if os.path.isdir(os.path.join(qroot, '.hg')):
1462 1461 yield qroot # we have a patch queue repo here
1463 1462 if recurse:
1464 1463 # avoid recursing inside the .hg directory
1465 1464 dirs.remove('.hg')
1466 1465 else:
1467 1466 dirs[:] = [] # don't descend further
1468 1467 elif followsym:
1469 1468 newdirs = []
1470 1469 for d in dirs:
1471 1470 fname = os.path.join(root, d)
1472 1471 if _add_dir_if_not_there(seen_dirs, fname):
1473 1472 if os.path.islink(fname):
1474 1473 for hgname in walkrepos(fname, True, seen_dirs):
1475 1474 yield hgname
1476 1475 else:
1477 1476 newdirs.append(d)
1478 1477 dirs[:] = newdirs
1479 1478
1480 1479 _rcpath = None
1481 1480
1482 1481 def os_rcpath():
1483 1482 '''return default os-specific hgrc search path'''
1484 1483 path = system_rcpath()
1485 1484 path.extend(user_rcpath())
1486 1485 path = [os.path.normpath(f) for f in path]
1487 1486 return path
1488 1487
1489 1488 def rcpath():
1490 1489 '''return hgrc search path. if env var HGRCPATH is set, use it.
1491 1490 for each item in path, if directory, use files ending in .rc,
1492 1491 else use item.
1493 1492 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1494 1493 if no HGRCPATH, use default os-specific path.'''
1495 1494 global _rcpath
1496 1495 if _rcpath is None:
1497 1496 if 'HGRCPATH' in os.environ:
1498 1497 _rcpath = []
1499 1498 for p in os.environ['HGRCPATH'].split(os.pathsep):
1500 1499 if not p: continue
1501 1500 if os.path.isdir(p):
1502 1501 for f, kind in osutil.listdir(p):
1503 1502 if f.endswith('.rc'):
1504 1503 _rcpath.append(os.path.join(p, f))
1505 1504 else:
1506 1505 _rcpath.append(p)
1507 1506 else:
1508 1507 _rcpath = os_rcpath()
1509 1508 return _rcpath
1510 1509
1511 1510 def bytecount(nbytes):
1512 1511 '''return byte count formatted as readable string, with units'''
1513 1512
1514 1513 units = (
1515 1514 (100, 1<<30, _('%.0f GB')),
1516 1515 (10, 1<<30, _('%.1f GB')),
1517 1516 (1, 1<<30, _('%.2f GB')),
1518 1517 (100, 1<<20, _('%.0f MB')),
1519 1518 (10, 1<<20, _('%.1f MB')),
1520 1519 (1, 1<<20, _('%.2f MB')),
1521 1520 (100, 1<<10, _('%.0f KB')),
1522 1521 (10, 1<<10, _('%.1f KB')),
1523 1522 (1, 1<<10, _('%.2f KB')),
1524 1523 (1, 1, _('%.0f bytes')),
1525 1524 )
1526 1525
1527 1526 for multiplier, divisor, format in units:
1528 1527 if nbytes >= divisor * multiplier:
1529 1528 return format % (nbytes / float(divisor))
1530 1529 return units[-1][2] % nbytes
1531 1530
1532 1531 def drop_scheme(scheme, path):
1533 1532 sc = scheme + ':'
1534 1533 if path.startswith(sc):
1535 1534 path = path[len(sc):]
1536 1535 if path.startswith('//'):
1537 1536 path = path[2:]
1538 1537 return path
1539 1538
1540 1539 def uirepr(s):
1541 1540 # Avoid double backslash in Windows path repr()
1542 1541 return repr(s).replace('\\\\', '\\')
1543 1542
1544 1543 def termwidth():
1545 1544 if 'COLUMNS' in os.environ:
1546 1545 try:
1547 1546 return int(os.environ['COLUMNS'])
1548 1547 except ValueError:
1549 1548 pass
1550 1549 try:
1551 1550 import termios, array, fcntl
1552 1551 for dev in (sys.stdout, sys.stdin):
1553 1552 try:
1554 1553 fd = dev.fileno()
1555 1554 if not os.isatty(fd):
1556 1555 continue
1557 1556 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1558 1557 return array.array('h', arri)[1]
1559 1558 except ValueError:
1560 1559 pass
1561 1560 except ImportError:
1562 1561 pass
1563 1562 return 80
1564 1563
1565 1564 def iterlines(iterator):
1566 1565 for chunk in iterator:
1567 1566 for line in chunk.splitlines():
1568 1567 yield line
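For reference, a hedged usage sketch of a few of the helpers defined above: parsedate() returns the (unixtime, offset) pair used throughout Mercurial, datestr() renders it back, and bytecount() picks a unit so that at most three significant digits are printed. The input strings are examples only.

# Hedged usage sketch of the date and size helpers above (Python 2).
from mercurial import util

when, offset = util.parsedate("2008-12-31 15:04:05 +0100")
print util.datestr((when, offset), format="%Y-%m-%d %H:%M %1%2")
print util.shortdate((when, offset))          # -> "2008-12-31"
print util.bytecount(123456789)               # -> "118 MB", per the unit table above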