Correct a bug in date formats with '>' or '<' accompanied by space characters.
Justin Peng
r7953:8c6f823e default
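The change below adds a single date.strip() call at the top of util.matchdate(). A minimal sketch of the behaviour change, assuming Mercurial's parsedate/matchdate as shown in the diff (sample dates are illustrative):

    from mercurial import util

    # Before this changeset, a padded specifier never reached the '<'/'>'
    # branches of matchdate(): date[0] was a space, so the whole string fell
    # through to parsedate(), matched no known format, and aborted with an
    # "invalid date" error.
    m = util.matchdate(' > 02/01 ')   # with the fix: stripped to '> 02/01'

    when, offset = util.parsedate('2006-02-01 13:00:30')
    m(when)   # boolean; '02/01' gets the current year and midnight as defaults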
@@ -1,1502 +1,1503 b''
1 1 """
2 2 util.py - Mercurial utility functions and platform specific implementations
3 3
4 4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7 7
8 8 This software may be used and distributed according to the terms
9 9 of the GNU General Public License, incorporated herein by reference.
10 10
11 11 This contains helper routines that are independent of the SCM core and hide
12 12 platform-specific details from the core.
13 13 """
14 14
15 15 from i18n import _
16 16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 17 import os, stat, threading, time, calendar, ConfigParser, glob, osutil
18 18 import imp
19 19
20 20 # Python compatibility
21 21
22 22 try:
23 23 set = set
24 24 frozenset = frozenset
25 25 except NameError:
26 26 from sets import Set as set, ImmutableSet as frozenset
27 27
28 28 _md5 = None
29 29 def md5(s):
30 30 global _md5
31 31 if _md5 is None:
32 32 try:
33 33 import hashlib
34 34 _md5 = hashlib.md5
35 35 except ImportError:
36 36 import md5
37 37 _md5 = md5.md5
38 38 return _md5(s)
39 39
40 40 _sha1 = None
41 41 def sha1(s):
42 42 global _sha1
43 43 if _sha1 is None:
44 44 try:
45 45 import hashlib
46 46 _sha1 = hashlib.sha1
47 47 except ImportError:
48 48 import sha
49 49 _sha1 = sha.sha
50 50 return _sha1(s)
51 51
52 52 try:
53 53 import subprocess
54 54 subprocess.Popen # trigger ImportError early
55 55 closefds = os.name == 'posix'
56 56 def popen2(cmd, mode='t', bufsize=-1):
57 57 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
58 58 close_fds=closefds,
59 59 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
60 60 return p.stdin, p.stdout
61 61 def popen3(cmd, mode='t', bufsize=-1):
62 62 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 63 close_fds=closefds,
64 64 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 65 stderr=subprocess.PIPE)
66 66 return p.stdin, p.stdout, p.stderr
67 67 def Popen3(cmd, capturestderr=False, bufsize=-1):
68 68 stderr = capturestderr and subprocess.PIPE or None
69 69 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
70 70 close_fds=closefds,
71 71 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
72 72 stderr=stderr)
73 73 p.fromchild = p.stdout
74 74 p.tochild = p.stdin
75 75 p.childerr = p.stderr
76 76 return p
77 77 except ImportError:
78 78 subprocess = None
79 79 from popen2 import Popen3
80 80 popen2 = os.popen2
81 81 popen3 = os.popen3
82 82
83 83
84 84 def version():
85 85 """Return version information if available."""
86 86 try:
87 87 import __version__
88 88 return __version__.version
89 89 except ImportError:
90 90 return 'unknown'
91 91
92 92 # used by parsedate
93 93 defaultdateformats = (
94 94 '%Y-%m-%d %H:%M:%S',
95 95 '%Y-%m-%d %I:%M:%S%p',
96 96 '%Y-%m-%d %H:%M',
97 97 '%Y-%m-%d %I:%M%p',
98 98 '%Y-%m-%d',
99 99 '%m-%d',
100 100 '%m/%d',
101 101 '%m/%d/%y',
102 102 '%m/%d/%Y',
103 103 '%a %b %d %H:%M:%S %Y',
104 104 '%a %b %d %I:%M:%S%p %Y',
105 105 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
106 106 '%b %d %H:%M:%S %Y',
107 107 '%b %d %I:%M:%S%p %Y',
108 108 '%b %d %H:%M:%S',
109 109 '%b %d %I:%M:%S%p',
110 110 '%b %d %H:%M',
111 111 '%b %d %I:%M%p',
112 112 '%b %d %Y',
113 113 '%b %d',
114 114 '%H:%M:%S',
115 115 '%I:%M:%S%p',
116 116 '%H:%M',
117 117 '%I:%M%p',
118 118 )
119 119
120 120 extendeddateformats = defaultdateformats + (
121 121 "%Y",
122 122 "%Y-%m",
123 123 "%b",
124 124 "%b %Y",
125 125 )
126 126
127 127 # differences from SafeConfigParser:
128 128 # - case-sensitive keys
129 129 # - allows values that are not strings (this means that you may not
130 130 # be able to save the configuration to a file)
131 131 class configparser(ConfigParser.SafeConfigParser):
132 132 def optionxform(self, optionstr):
133 133 return optionstr
134 134
135 135 def set(self, section, option, value):
136 136 return ConfigParser.ConfigParser.set(self, section, option, value)
137 137
138 138 def _interpolate(self, section, option, rawval, vars):
139 139 if not isinstance(rawval, basestring):
140 140 return rawval
141 141 return ConfigParser.SafeConfigParser._interpolate(self, section,
142 142 option, rawval, vars)
143 143
144 144 def cachefunc(func):
145 145 '''cache the result of function calls'''
146 146 # XXX doesn't handle keyword args
147 147 cache = {}
148 148 if func.func_code.co_argcount == 1:
149 149 # we gain a small amount of time because
150 150 # we don't need to pack/unpack the list
151 151 def f(arg):
152 152 if arg not in cache:
153 153 cache[arg] = func(arg)
154 154 return cache[arg]
155 155 else:
156 156 def f(*args):
157 157 if args not in cache:
158 158 cache[args] = func(*args)
159 159 return cache[args]
160 160
161 161 return f
162 162
163 163 def pipefilter(s, cmd):
164 164 '''filter string S through command CMD, returning its output'''
165 165 (pin, pout) = popen2(cmd, 'b')
166 166 def writer():
167 167 try:
168 168 pin.write(s)
169 169 pin.close()
170 170 except IOError, inst:
171 171 if inst.errno != errno.EPIPE:
172 172 raise
173 173
174 174 # we should use select instead on UNIX, but this will work on most
175 175 # systems, including Windows
176 176 w = threading.Thread(target=writer)
177 177 w.start()
178 178 f = pout.read()
179 179 pout.close()
180 180 w.join()
181 181 return f
182 182
183 183 def tempfilter(s, cmd):
184 184 '''filter string S through a pair of temporary files with CMD.
185 185 CMD is used as a template to create the real command to be run,
186 186 with the strings INFILE and OUTFILE replaced by the real names of
187 187 the temporary files generated.'''
188 188 inname, outname = None, None
189 189 try:
190 190 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
191 191 fp = os.fdopen(infd, 'wb')
192 192 fp.write(s)
193 193 fp.close()
194 194 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
195 195 os.close(outfd)
196 196 cmd = cmd.replace('INFILE', inname)
197 197 cmd = cmd.replace('OUTFILE', outname)
198 198 code = os.system(cmd)
199 199 if sys.platform == 'OpenVMS' and code & 1:
200 200 code = 0
201 201 if code: raise Abort(_("command '%s' failed: %s") %
202 202 (cmd, explain_exit(code)))
203 203 return open(outname, 'rb').read()
204 204 finally:
205 205 try:
206 206 if inname: os.unlink(inname)
207 207 except: pass
208 208 try:
209 209 if outname: os.unlink(outname)
210 210 except: pass
211 211
212 212 filtertable = {
213 213 'tempfile:': tempfilter,
214 214 'pipe:': pipefilter,
215 215 }
216 216
217 217 def filter(s, cmd):
218 218 "filter a string through a command that transforms its input to its output"
219 219 for name, fn in filtertable.iteritems():
220 220 if cmd.startswith(name):
221 221 return fn(s, cmd[len(name):].lstrip())
222 222 return pipefilter(s, cmd)
223 223
224 224 def binary(s):
225 225 """return true if a string is binary data"""
226 226 if s and '\0' in s:
227 227 return True
228 228 return False
229 229
230 230 def unique(g):
231 231 """return the uniq elements of iterable g"""
232 232 return dict.fromkeys(g).keys()
233 233
234 234 def sort(l):
235 235 if not isinstance(l, list):
236 236 l = list(l)
237 237 l.sort()
238 238 return l
239 239
240 240 def increasingchunks(source, min=1024, max=65536):
241 241 '''return no less than min bytes per chunk while data remains,
242 242 doubling min after each chunk until it reaches max'''
243 243 def log2(x):
244 244 if not x:
245 245 return 0
246 246 i = 0
247 247 while x:
248 248 x >>= 1
249 249 i += 1
250 250 return i - 1
251 251
252 252 buf = []
253 253 blen = 0
254 254 for chunk in source:
255 255 buf.append(chunk)
256 256 blen += len(chunk)
257 257 if blen >= min:
258 258 if min < max:
259 259 min = min << 1
260 260 nmin = 1 << log2(blen)
261 261 if nmin > min:
262 262 min = nmin
263 263 if min > max:
264 264 min = max
265 265 yield ''.join(buf)
266 266 blen = 0
267 267 buf = []
268 268 if buf:
269 269 yield ''.join(buf)
270 270
271 271 Abort = error.Abort
272 272
273 273 def always(fn): return True
274 274 def never(fn): return False
275 275
276 276 def patkind(name, default):
277 277 """Split a string into an optional pattern kind prefix and the
278 278 actual pattern."""
279 279 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
280 280 if name.startswith(prefix + ':'): return name.split(':', 1)
281 281 return default, name
282 282
283 283 def globre(pat, head='^', tail='$'):
284 284 "convert a glob pattern into a regexp"
285 285 i, n = 0, len(pat)
286 286 res = ''
287 287 group = 0
288 288 def peek(): return i < n and pat[i]
289 289 while i < n:
290 290 c = pat[i]
291 291 i = i+1
292 292 if c == '*':
293 293 if peek() == '*':
294 294 i += 1
295 295 res += '.*'
296 296 else:
297 297 res += '[^/]*'
298 298 elif c == '?':
299 299 res += '.'
300 300 elif c == '[':
301 301 j = i
302 302 if j < n and pat[j] in '!]':
303 303 j += 1
304 304 while j < n and pat[j] != ']':
305 305 j += 1
306 306 if j >= n:
307 307 res += '\\['
308 308 else:
309 309 stuff = pat[i:j].replace('\\','\\\\')
310 310 i = j + 1
311 311 if stuff[0] == '!':
312 312 stuff = '^' + stuff[1:]
313 313 elif stuff[0] == '^':
314 314 stuff = '\\' + stuff
315 315 res = '%s[%s]' % (res, stuff)
316 316 elif c == '{':
317 317 group += 1
318 318 res += '(?:'
319 319 elif c == '}' and group:
320 320 res += ')'
321 321 group -= 1
322 322 elif c == ',' and group:
323 323 res += '|'
324 324 elif c == '\\':
325 325 p = peek()
326 326 if p:
327 327 i += 1
328 328 res += re.escape(p)
329 329 else:
330 330 res += re.escape(c)
331 331 else:
332 332 res += re.escape(c)
333 333 return head + res + tail
334 334
335 335 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
336 336
337 337 def pathto(root, n1, n2):
338 338 '''return the relative path from one place to another.
339 339 root should use os.sep to separate directories
340 340 n1 should use os.sep to separate directories
341 341 n2 should use "/" to separate directories
342 342 returns an os.sep-separated path.
343 343
344 344 If n1 is a relative path, it's assumed it's
345 345 relative to root.
346 346 n2 should always be relative to root.
347 347 '''
348 348 if not n1: return localpath(n2)
349 349 if os.path.isabs(n1):
350 350 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
351 351 return os.path.join(root, localpath(n2))
352 352 n2 = '/'.join((pconvert(root), n2))
353 353 a, b = splitpath(n1), n2.split('/')
354 354 a.reverse()
355 355 b.reverse()
356 356 while a and b and a[-1] == b[-1]:
357 357 a.pop()
358 358 b.pop()
359 359 b.reverse()
360 360 return os.sep.join((['..'] * len(a)) + b) or '.'
361 361
362 362 def canonpath(root, cwd, myname):
363 363 """return the canonical path of myname, given cwd and root"""
364 364 if root == os.sep:
365 365 rootsep = os.sep
366 366 elif endswithsep(root):
367 367 rootsep = root
368 368 else:
369 369 rootsep = root + os.sep
370 370 name = myname
371 371 if not os.path.isabs(name):
372 372 name = os.path.join(root, cwd, name)
373 373 name = os.path.normpath(name)
374 374 audit_path = path_auditor(root)
375 375 if name != rootsep and name.startswith(rootsep):
376 376 name = name[len(rootsep):]
377 377 audit_path(name)
378 378 return pconvert(name)
379 379 elif name == root:
380 380 return ''
381 381 else:
382 382 # Determine whether `name' is in the hierarchy at or beneath `root',
383 383 # by iterating name=dirname(name) until that causes no change (can't
384 384 # check name == '/', because that doesn't work on windows). For each
385 385 # `name', compare dev/inode numbers. If they match, the list `rel'
386 386 # holds the reversed list of components making up the relative file
387 387 # name we want.
388 388 root_st = os.stat(root)
389 389 rel = []
390 390 while True:
391 391 try:
392 392 name_st = os.stat(name)
393 393 except OSError:
394 394 break
395 395 if samestat(name_st, root_st):
396 396 if not rel:
397 397 # name was actually the same as root (maybe a symlink)
398 398 return ''
399 399 rel.reverse()
400 400 name = os.path.join(*rel)
401 401 audit_path(name)
402 402 return pconvert(name)
403 403 dirname, basename = os.path.split(name)
404 404 rel.append(basename)
405 405 if dirname == name:
406 406 break
407 407 name = dirname
408 408
409 409 raise Abort('%s not under root' % myname)
410 410
411 411 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
412 412 """build a function to match a set of file patterns
413 413
414 414 arguments:
415 415 canonroot - the canonical root of the tree you're matching against
416 416 cwd - the current working directory, if relevant
417 417 names - patterns to find
418 418 inc - patterns to include
419 419 exc - patterns to exclude
420 420 dflt_pat - if a pattern in names has no explicit type, assume this one
421 421 src - where these patterns came from (e.g. .hgignore)
422 422
423 423 a pattern is one of:
424 424 'glob:<glob>' - a glob relative to cwd
425 425 're:<regexp>' - a regular expression
426 426 'path:<path>' - a path relative to canonroot
427 427 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
428 428 'relpath:<path>' - a path relative to cwd
429 429 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
430 430 '<something>' - one of the cases above, selected by the dflt_pat argument
431 431
432 432 returns:
433 433 a 3-tuple containing
434 434 - list of roots (places where one should start a recursive walk of the fs);
435 435 this often matches the explicit non-pattern names passed in, but also
436 436 includes the initial part of glob: patterns that has no glob characters
437 437 - a bool match(filename) function
438 438 - a bool indicating if any patterns were passed in
439 439 """
440 440
441 441 # a common case: no patterns at all
442 442 if not names and not inc and not exc:
443 443 return [], always, False
444 444
445 445 def contains_glob(name):
446 446 for c in name:
447 447 if c in _globchars: return True
448 448 return False
449 449
450 450 def regex(kind, name, tail):
451 451 '''convert a pattern into a regular expression'''
452 452 if not name:
453 453 return ''
454 454 if kind == 're':
455 455 return name
456 456 elif kind == 'path':
457 457 return '^' + re.escape(name) + '(?:/|$)'
458 458 elif kind == 'relglob':
459 459 return globre(name, '(?:|.*/)', tail)
460 460 elif kind == 'relpath':
461 461 return re.escape(name) + '(?:/|$)'
462 462 elif kind == 'relre':
463 463 if name.startswith('^'):
464 464 return name
465 465 return '.*' + name
466 466 return globre(name, '', tail)
467 467
468 468 def matchfn(pats, tail):
469 469 """build a matching function from a set of patterns"""
470 470 if not pats:
471 471 return
472 472 try:
473 473 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
474 474 if len(pat) > 20000:
475 475 raise OverflowError()
476 476 return re.compile(pat).match
477 477 except OverflowError:
478 478 # We're using a Python with a tiny regex engine and we
479 479 # made it explode, so we'll divide the pattern list in two
480 480 # until it works
481 481 l = len(pats)
482 482 if l < 2:
483 483 raise
484 484 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
485 485 return lambda s: a(s) or b(s)
486 486 except re.error:
487 487 for k, p in pats:
488 488 try:
489 489 re.compile('(?:%s)' % regex(k, p, tail))
490 490 except re.error:
491 491 if src:
492 492 raise Abort("%s: invalid pattern (%s): %s" %
493 493 (src, k, p))
494 494 else:
495 495 raise Abort("invalid pattern (%s): %s" % (k, p))
496 496 raise Abort("invalid pattern")
497 497
498 498 def globprefix(pat):
499 499 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
500 500 root = []
501 501 for p in pat.split('/'):
502 502 if contains_glob(p): break
503 503 root.append(p)
504 504 return '/'.join(root) or '.'
505 505
506 506 def normalizepats(names, default):
507 507 pats = []
508 508 roots = []
509 509 anypats = False
510 510 for kind, name in [patkind(p, default) for p in names]:
511 511 if kind in ('glob', 'relpath'):
512 512 name = canonpath(canonroot, cwd, name)
513 513 elif kind in ('relglob', 'path'):
514 514 name = normpath(name)
515 515
516 516 pats.append((kind, name))
517 517
518 518 if kind in ('glob', 're', 'relglob', 'relre'):
519 519 anypats = True
520 520
521 521 if kind == 'glob':
522 522 root = globprefix(name)
523 523 roots.append(root)
524 524 elif kind in ('relpath', 'path'):
525 525 roots.append(name or '.')
526 526 elif kind == 'relglob':
527 527 roots.append('.')
528 528 return roots, pats, anypats
529 529
530 530 roots, pats, anypats = normalizepats(names, dflt_pat)
531 531
532 532 patmatch = matchfn(pats, '$') or always
533 533 incmatch = always
534 534 if inc:
535 535 dummy, inckinds, dummy = normalizepats(inc, 'glob')
536 536 incmatch = matchfn(inckinds, '(?:/|$)')
537 537 excmatch = never
538 538 if exc:
539 539 dummy, exckinds, dummy = normalizepats(exc, 'glob')
540 540 excmatch = matchfn(exckinds, '(?:/|$)')
541 541
542 542 if not names and inc and not exc:
543 543 # common case: hgignore patterns
544 544 match = incmatch
545 545 else:
546 546 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
547 547
548 548 return (roots, match, (inc or exc or anypats) and True)
549 549
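# Illustration only (not part of the diff): a typical call to the matcher()
# defined above, assuming a repository rooted at '/repo'.
#   roots, match, anypats = matcher('/repo', cwd='', names=['glob:src/*.c'])
#   roots   -> ['src']          # non-glob prefix, used to seed the walk
#   match('src/main.c')         # truthy; match('doc/x.txt') is falsy
#   anypats -> True             # a glob pattern was supplied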
550 550 _hgexecutable = None
551 551
552 552 def main_is_frozen():
553 553 """return True if we are a frozen executable.
554 554
555 555 The code supports py2exe (most common, Windows only) and tools/freeze
556 556 (portable, not much used).
557 557 """
558 558 return (hasattr(sys, "frozen") or # new py2exe
559 559 hasattr(sys, "importers") or # old py2exe
560 560 imp.is_frozen("__main__")) # tools/freeze
561 561
562 562 def hgexecutable():
563 563 """return location of the 'hg' executable.
564 564
565 565 Defaults to $HG or 'hg' in the search path.
566 566 """
567 567 if _hgexecutable is None:
568 568 hg = os.environ.get('HG')
569 569 if hg:
570 570 set_hgexecutable(hg)
571 571 elif main_is_frozen():
572 572 set_hgexecutable(sys.executable)
573 573 else:
574 574 set_hgexecutable(find_exe('hg') or 'hg')
575 575 return _hgexecutable
576 576
577 577 def set_hgexecutable(path):
578 578 """set location of the 'hg' executable"""
579 579 global _hgexecutable
580 580 _hgexecutable = path
581 581
582 582 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
583 583 '''enhanced shell command execution.
584 584 run with environment maybe modified, maybe in different dir.
585 585
586 586 if command fails and onerr is None, return status. if ui object,
587 587 print error message and return status, else raise onerr object as
588 588 exception.'''
589 589 def py2shell(val):
590 590 'convert python object into string that is useful to shell'
591 591 if val in (None, False):
592 592 return '0'
593 593 if val == True:
594 594 return '1'
595 595 return str(val)
596 596 oldenv = {}
597 597 for k in environ:
598 598 oldenv[k] = os.environ.get(k)
599 599 if cwd is not None:
600 600 oldcwd = os.getcwd()
601 601 origcmd = cmd
602 602 if os.name == 'nt':
603 603 cmd = '"%s"' % cmd
604 604 try:
605 605 for k, v in environ.iteritems():
606 606 os.environ[k] = py2shell(v)
607 607 os.environ['HG'] = hgexecutable()
608 608 if cwd is not None and oldcwd != cwd:
609 609 os.chdir(cwd)
610 610 rc = os.system(cmd)
611 611 if sys.platform == 'OpenVMS' and rc & 1:
612 612 rc = 0
613 613 if rc and onerr:
614 614 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
615 615 explain_exit(rc)[0])
616 616 if errprefix:
617 617 errmsg = '%s: %s' % (errprefix, errmsg)
618 618 try:
619 619 onerr.warn(errmsg + '\n')
620 620 except AttributeError:
621 621 raise onerr(errmsg)
622 622 return rc
623 623 finally:
624 624 for k, v in oldenv.iteritems():
625 625 if v is None:
626 626 del os.environ[k]
627 627 else:
628 628 os.environ[k] = v
629 629 if cwd is not None and oldcwd != cwd:
630 630 os.chdir(oldcwd)
631 631
632 632 def checksignature(func):
633 633 '''wrap a function with code to check for calling errors'''
634 634 def check(*args, **kwargs):
635 635 try:
636 636 return func(*args, **kwargs)
637 637 except TypeError:
638 638 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
639 639 raise error.SignatureError
640 640 raise
641 641
642 642 return check
643 643
644 644 # os.path.lexists is not available on python2.3
645 645 def lexists(filename):
646 646 "test whether a file with this name exists. does not follow symlinks"
647 647 try:
648 648 os.lstat(filename)
649 649 except:
650 650 return False
651 651 return True
652 652
653 653 def rename(src, dst):
654 654 """forcibly rename a file"""
655 655 try:
656 656 os.rename(src, dst)
657 657 except OSError, err: # FIXME: check err (EEXIST ?)
658 658 # on windows, rename to existing file is not allowed, so we
659 659 # must delete destination first. but if file is open, unlink
660 660 # schedules it for delete but does not delete it. rename
661 661 # happens immediately even for open files, so we rename
662 662 # destination to a temporary name, then delete that. then
663 663 # rename is safe to do.
664 664 temp = dst + "-force-rename"
665 665 os.rename(dst, temp)
666 666 os.unlink(temp)
667 667 os.rename(src, dst)
668 668
669 669 def unlink(f):
670 670 """unlink and remove the directory if it is empty"""
671 671 os.unlink(f)
672 672 # try removing directories that might now be empty
673 673 try:
674 674 os.removedirs(os.path.dirname(f))
675 675 except OSError:
676 676 pass
677 677
678 678 def copyfile(src, dest):
679 679 "copy a file, preserving mode and atime/mtime"
680 680 if os.path.islink(src):
681 681 try:
682 682 os.unlink(dest)
683 683 except:
684 684 pass
685 685 os.symlink(os.readlink(src), dest)
686 686 else:
687 687 try:
688 688 shutil.copyfile(src, dest)
689 689 shutil.copystat(src, dest)
690 690 except shutil.Error, inst:
691 691 raise Abort(str(inst))
692 692
693 693 def copyfiles(src, dst, hardlink=None):
694 694 """Copy a directory tree using hardlinks if possible"""
695 695
696 696 if hardlink is None:
697 697 hardlink = (os.stat(src).st_dev ==
698 698 os.stat(os.path.dirname(dst)).st_dev)
699 699
700 700 if os.path.isdir(src):
701 701 os.mkdir(dst)
702 702 for name, kind in osutil.listdir(src):
703 703 srcname = os.path.join(src, name)
704 704 dstname = os.path.join(dst, name)
705 705 copyfiles(srcname, dstname, hardlink)
706 706 else:
707 707 if hardlink:
708 708 try:
709 709 os_link(src, dst)
710 710 except (IOError, OSError):
711 711 hardlink = False
712 712 shutil.copy(src, dst)
713 713 else:
714 714 shutil.copy(src, dst)
715 715
716 716 class path_auditor(object):
717 717 '''ensure that a filesystem path contains no banned components.
718 718 the following properties of a path are checked:
719 719
720 720 - under top-level .hg
721 721 - starts at the root of a windows drive
722 722 - contains ".."
723 723 - traverses a symlink (e.g. a/symlink_here/b)
724 724 - inside a nested repository'''
725 725
726 726 def __init__(self, root):
727 727 self.audited = set()
728 728 self.auditeddir = set()
729 729 self.root = root
730 730
731 731 def __call__(self, path):
732 732 if path in self.audited:
733 733 return
734 734 normpath = os.path.normcase(path)
735 735 parts = splitpath(normpath)
736 736 if (os.path.splitdrive(path)[0]
737 737 or parts[0].lower() in ('.hg', '.hg.', '')
738 738 or os.pardir in parts):
739 739 raise Abort(_("path contains illegal component: %s") % path)
740 740 if '.hg' in path.lower():
741 741 lparts = [p.lower() for p in parts]
742 742 for p in '.hg', '.hg.':
743 743 if p in lparts[1:]:
744 744 pos = lparts.index(p)
745 745 base = os.path.join(*parts[:pos])
746 746 raise Abort(_('path %r is inside repo %r') % (path, base))
747 747 def check(prefix):
748 748 curpath = os.path.join(self.root, prefix)
749 749 try:
750 750 st = os.lstat(curpath)
751 751 except OSError, err:
752 752 # EINVAL can be raised as invalid path syntax under win32.
753 753 # They must be ignored so that patterns can be checked too.
754 754 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
755 755 raise
756 756 else:
757 757 if stat.S_ISLNK(st.st_mode):
758 758 raise Abort(_('path %r traverses symbolic link %r') %
759 759 (path, prefix))
760 760 elif (stat.S_ISDIR(st.st_mode) and
761 761 os.path.isdir(os.path.join(curpath, '.hg'))):
762 762 raise Abort(_('path %r is inside repo %r') %
763 763 (path, prefix))
764 764 parts.pop()
765 765 prefixes = []
766 766 for n in range(len(parts)):
767 767 prefix = os.sep.join(parts)
768 768 if prefix in self.auditeddir:
769 769 break
770 770 check(prefix)
771 771 prefixes.append(prefix)
772 772 parts.pop()
773 773
774 774 self.audited.add(path)
775 775 # only add prefixes to the cache after checking everything: we don't
776 776 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
777 777 self.auditeddir.update(prefixes)
778 778
779 779 if os.name == 'nt':
780 780 from windows import *
781 781 def expand_glob(pats):
782 782 '''On Windows, expand the implicit globs in a list of patterns'''
783 783 ret = []
784 784 for p in pats:
785 785 kind, name = patkind(p, None)
786 786 if kind is None:
787 787 globbed = glob.glob(name)
788 788 if globbed:
789 789 ret.extend(globbed)
790 790 continue
791 791 # if we couldn't expand the glob, just keep it around
792 792 ret.append(p)
793 793 return ret
794 794 else:
795 795 from posix import *
796 796
797 797 def makelock(info, pathname):
798 798 try:
799 799 return os.symlink(info, pathname)
800 800 except OSError, why:
801 801 if why.errno == errno.EEXIST:
802 802 raise
803 803 except AttributeError: # no symlink in os
804 804 pass
805 805
806 806 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
807 807 os.write(ld, info)
808 808 os.close(ld)
809 809
810 810 def readlock(pathname):
811 811 try:
812 812 return os.readlink(pathname)
813 813 except OSError, why:
814 814 if why.errno not in (errno.EINVAL, errno.ENOSYS):
815 815 raise
816 816 except AttributeError: # no symlink in os
817 817 pass
818 818 return posixfile(pathname).read()
819 819
820 820 def nlinks(pathname):
821 821 """Return number of hardlinks for the given file."""
822 822 return os.lstat(pathname).st_nlink
823 823
824 824 if hasattr(os, 'link'):
825 825 os_link = os.link
826 826 else:
827 827 def os_link(src, dst):
828 828 raise OSError(0, _("Hardlinks not supported"))
829 829
830 830 def fstat(fp):
831 831 '''stat file object that may not have fileno method.'''
832 832 try:
833 833 return os.fstat(fp.fileno())
834 834 except AttributeError:
835 835 return os.stat(fp.name)
836 836
837 837 # File system features
838 838
839 839 def checkcase(path):
840 840 """
841 841 Check whether the given path is on a case-sensitive filesystem
842 842
843 843 Requires a path (like /foo/.hg) ending with a foldable final
844 844 directory component.
845 845 """
846 846 s1 = os.stat(path)
847 847 d, b = os.path.split(path)
848 848 p2 = os.path.join(d, b.upper())
849 849 if path == p2:
850 850 p2 = os.path.join(d, b.lower())
851 851 try:
852 852 s2 = os.stat(p2)
853 853 if s2 == s1:
854 854 return False
855 855 return True
856 856 except:
857 857 return True
858 858
859 859 _fspathcache = {}
860 860 def fspath(name, root):
861 861 '''Get name in the case stored in the filesystem
862 862
863 863 The name is either relative to root, or it is an absolute path starting
864 864 with root. Note that this function is unnecessary, and should not be
865 865 called, for case-sensitive filesystems (simply because it's expensive).
866 866 '''
867 867 # If name is absolute, make it relative
868 868 if name.lower().startswith(root.lower()):
869 869 l = len(root)
870 870 if name[l] == os.sep or name[l] == os.altsep:
871 871 l = l + 1
872 872 name = name[l:]
873 873
874 874 if not os.path.exists(os.path.join(root, name)):
875 875 return None
876 876
877 877 seps = os.sep
878 878 if os.altsep:
879 879 seps = seps + os.altsep
880 880 # Protect backslashes. This gets silly very quickly.
881 881 seps.replace('\\','\\\\')
882 882 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
883 883 dir = os.path.normcase(os.path.normpath(root))
884 884 result = []
885 885 for part, sep in pattern.findall(name):
886 886 if sep:
887 887 result.append(sep)
888 888 continue
889 889
890 890 if dir not in _fspathcache:
891 891 _fspathcache[dir] = os.listdir(dir)
892 892 contents = _fspathcache[dir]
893 893
894 894 lpart = part.lower()
895 895 for n in contents:
896 896 if n.lower() == lpart:
897 897 result.append(n)
898 898 break
899 899 else:
900 900 # Cannot happen, as the file exists!
901 901 result.append(part)
902 902 dir = os.path.join(dir, lpart)
903 903
904 904 return ''.join(result)
905 905
906 906 def checkexec(path):
907 907 """
908 908 Check whether the given path is on a filesystem with UNIX-like exec flags
909 909
910 910 Requires a directory (like /foo/.hg)
911 911 """
912 912
913 913 # VFAT on some Linux versions can flip mode but it doesn't persist
914 914 # a FS remount. Frequently we can detect it if files are created
915 915 # with exec bit on.
916 916
917 917 try:
918 918 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
919 919 fh, fn = tempfile.mkstemp("", "", path)
920 920 try:
921 921 os.close(fh)
922 922 m = os.stat(fn).st_mode & 0777
923 923 new_file_has_exec = m & EXECFLAGS
924 924 os.chmod(fn, m ^ EXECFLAGS)
925 925 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
926 926 finally:
927 927 os.unlink(fn)
928 928 except (IOError, OSError):
929 929 # we don't care, the user probably won't be able to commit anyway
930 930 return False
931 931 return not (new_file_has_exec or exec_flags_cannot_flip)
932 932
933 933 def checklink(path):
934 934 """check whether the given path is on a symlink-capable filesystem"""
935 935 # mktemp is not racy because symlink creation will fail if the
936 936 # file already exists
937 937 name = tempfile.mktemp(dir=path)
938 938 try:
939 939 os.symlink(".", name)
940 940 os.unlink(name)
941 941 return True
942 942 except (OSError, AttributeError):
943 943 return False
944 944
945 945 def needbinarypatch():
946 946 """return True if patches should be applied in binary mode by default."""
947 947 return os.name == 'nt'
948 948
949 949 def endswithsep(path):
950 950 '''Check path ends with os.sep or os.altsep.'''
951 951 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
952 952
953 953 def splitpath(path):
954 954 '''Split path by os.sep.
955 955 Note that this function does not use os.altsep because this is
956 956 an alternative of simple "xxx.split(os.sep)".
957 957 It is recommended to use os.path.normpath() before using this
958 958 function if needed.'''
959 959 return path.split(os.sep)
960 960
961 961 def gui():
962 962 '''Are we running in a GUI?'''
963 963 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
964 964
965 965 def lookup_reg(key, name=None, scope=None):
966 966 return None
967 967
968 968 def mktempcopy(name, emptyok=False, createmode=None):
969 969 """Create a temporary file with the same contents from name
970 970
971 971 The permission bits are copied from the original file.
972 972
973 973 If the temporary file is going to be truncated immediately, you
974 974 can use emptyok=True as an optimization.
975 975
976 976 Returns the name of the temporary file.
977 977 """
978 978 d, fn = os.path.split(name)
979 979 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
980 980 os.close(fd)
981 981 # Temporary files are created with mode 0600, which is usually not
982 982 # what we want. If the original file already exists, just copy
983 983 # its mode. Otherwise, manually obey umask.
984 984 try:
985 985 st_mode = os.lstat(name).st_mode & 0777
986 986 except OSError, inst:
987 987 if inst.errno != errno.ENOENT:
988 988 raise
989 989 st_mode = createmode
990 990 if st_mode is None:
991 991 st_mode = ~umask
992 992 st_mode &= 0666
993 993 os.chmod(temp, st_mode)
994 994 if emptyok:
995 995 return temp
996 996 try:
997 997 try:
998 998 ifp = posixfile(name, "rb")
999 999 except IOError, inst:
1000 1000 if inst.errno == errno.ENOENT:
1001 1001 return temp
1002 1002 if not getattr(inst, 'filename', None):
1003 1003 inst.filename = name
1004 1004 raise
1005 1005 ofp = posixfile(temp, "wb")
1006 1006 for chunk in filechunkiter(ifp):
1007 1007 ofp.write(chunk)
1008 1008 ifp.close()
1009 1009 ofp.close()
1010 1010 except:
1011 1011 try: os.unlink(temp)
1012 1012 except: pass
1013 1013 raise
1014 1014 return temp
1015 1015
1016 1016 class atomictempfile(posixfile):
1017 1017 """file-like object that atomically updates a file
1018 1018
1019 1019 All writes will be redirected to a temporary copy of the original
1020 1020 file. When rename is called, the copy is renamed to the original
1021 1021 name, making the changes visible.
1022 1022 """
1023 1023 def __init__(self, name, mode, createmode):
1024 1024 self.__name = name
1025 1025 self.temp = mktempcopy(name, emptyok=('w' in mode),
1026 1026 createmode=createmode)
1027 1027 posixfile.__init__(self, self.temp, mode)
1028 1028
1029 1029 def rename(self):
1030 1030 if not self.closed:
1031 1031 posixfile.close(self)
1032 1032 rename(self.temp, localpath(self.__name))
1033 1033
1034 1034 def __del__(self):
1035 1035 if not self.closed:
1036 1036 try:
1037 1037 os.unlink(self.temp)
1038 1038 except: pass
1039 1039 posixfile.close(self)
1040 1040
1041 1041 def makedirs(name, mode=None):
1042 1042 """recursive directory creation with parent mode inheritance"""
1043 1043 try:
1044 1044 os.mkdir(name)
1045 1045 if mode is not None:
1046 1046 os.chmod(name, mode)
1047 1047 return
1048 1048 except OSError, err:
1049 1049 if err.errno == errno.EEXIST:
1050 1050 return
1051 1051 if err.errno != errno.ENOENT:
1052 1052 raise
1053 1053 parent = os.path.abspath(os.path.dirname(name))
1054 1054 makedirs(parent, mode)
1055 1055 makedirs(name, mode)
1056 1056
1057 1057 class opener(object):
1058 1058 """Open files relative to a base directory
1059 1059
1060 1060 This class is used to hide the details of COW semantics and
1061 1061 remote file access from higher level code.
1062 1062 """
1063 1063 def __init__(self, base, audit=True):
1064 1064 self.base = base
1065 1065 if audit:
1066 1066 self.audit_path = path_auditor(base)
1067 1067 else:
1068 1068 self.audit_path = always
1069 1069 self.createmode = None
1070 1070
1071 1071 def __getattr__(self, name):
1072 1072 if name == '_can_symlink':
1073 1073 self._can_symlink = checklink(self.base)
1074 1074 return self._can_symlink
1075 1075 raise AttributeError(name)
1076 1076
1077 1077 def _fixfilemode(self, name):
1078 1078 if self.createmode is None:
1079 1079 return
1080 1080 os.chmod(name, self.createmode & 0666)
1081 1081
1082 1082 def __call__(self, path, mode="r", text=False, atomictemp=False):
1083 1083 self.audit_path(path)
1084 1084 f = os.path.join(self.base, path)
1085 1085
1086 1086 if not text and "b" not in mode:
1087 1087 mode += "b" # for that other OS
1088 1088
1089 1089 nlink = -1
1090 1090 if mode not in ("r", "rb"):
1091 1091 try:
1092 1092 nlink = nlinks(f)
1093 1093 except OSError:
1094 1094 nlink = 0
1095 1095 d = os.path.dirname(f)
1096 1096 if not os.path.isdir(d):
1097 1097 makedirs(d, self.createmode)
1098 1098 if atomictemp:
1099 1099 return atomictempfile(f, mode, self.createmode)
1100 1100 if nlink > 1:
1101 1101 rename(mktempcopy(f), f)
1102 1102 fp = posixfile(f, mode)
1103 1103 if nlink == 0:
1104 1104 self._fixfilemode(f)
1105 1105 return fp
1106 1106
1107 1107 def symlink(self, src, dst):
1108 1108 self.audit_path(dst)
1109 1109 linkname = os.path.join(self.base, dst)
1110 1110 try:
1111 1111 os.unlink(linkname)
1112 1112 except OSError:
1113 1113 pass
1114 1114
1115 1115 dirname = os.path.dirname(linkname)
1116 1116 if not os.path.exists(dirname):
1117 1117 makedirs(dirname, self.createmode)
1118 1118
1119 1119 if self._can_symlink:
1120 1120 try:
1121 1121 os.symlink(src, linkname)
1122 1122 except OSError, err:
1123 1123 raise OSError(err.errno, _('could not symlink to %r: %s') %
1124 1124 (src, err.strerror), linkname)
1125 1125 else:
1126 1126 f = self(dst, "w")
1127 1127 f.write(src)
1128 1128 f.close()
1129 1129 self._fixfilemode(dst)
1130 1130
1131 1131 class chunkbuffer(object):
1132 1132 """Allow arbitrary sized chunks of data to be efficiently read from an
1133 1133 iterator over chunks of arbitrary size."""
1134 1134
1135 1135 def __init__(self, in_iter):
1136 1136 """in_iter is the iterator that's iterating over the input chunks.
1137 1137 targetsize is how big a buffer to try to maintain."""
1138 1138 self.iter = iter(in_iter)
1139 1139 self.buf = ''
1140 1140 self.targetsize = 2**16
1141 1141
1142 1142 def read(self, l):
1143 1143 """Read L bytes of data from the iterator of chunks of data.
1144 1144 Returns less than L bytes if the iterator runs dry."""
1145 1145 if l > len(self.buf) and self.iter:
1146 1146 # Clamp to a multiple of self.targetsize
1147 1147 targetsize = max(l, self.targetsize)
1148 1148 collector = cStringIO.StringIO()
1149 1149 collector.write(self.buf)
1150 1150 collected = len(self.buf)
1151 1151 for chunk in self.iter:
1152 1152 collector.write(chunk)
1153 1153 collected += len(chunk)
1154 1154 if collected >= targetsize:
1155 1155 break
1156 1156 if collected < targetsize:
1157 1157 self.iter = False
1158 1158 self.buf = collector.getvalue()
1159 1159 if len(self.buf) == l:
1160 1160 s, self.buf = str(self.buf), ''
1161 1161 else:
1162 1162 s, self.buf = self.buf[:l], buffer(self.buf, l)
1163 1163 return s
1164 1164
1165 1165 def filechunkiter(f, size=65536, limit=None):
1166 1166 """Create a generator that produces the data in the file size
1167 1167 (default 65536) bytes at a time, up to optional limit (default is
1168 1168 to read all data). Chunks may be less than size bytes if the
1169 1169 chunk is the last chunk in the file, or the file is a socket or
1170 1170 some other type of file that sometimes reads less data than is
1171 1171 requested."""
1172 1172 assert size >= 0
1173 1173 assert limit is None or limit >= 0
1174 1174 while True:
1175 1175 if limit is None: nbytes = size
1176 1176 else: nbytes = min(limit, size)
1177 1177 s = nbytes and f.read(nbytes)
1178 1178 if not s: break
1179 1179 if limit: limit -= len(s)
1180 1180 yield s
1181 1181
1182 1182 def makedate():
1183 1183 lt = time.localtime()
1184 1184 if lt[8] == 1 and time.daylight:
1185 1185 tz = time.altzone
1186 1186 else:
1187 1187 tz = time.timezone
1188 1188 return time.mktime(lt), tz
1189 1189
1190 1190 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1191 1191 """represent a (unixtime, offset) tuple as a localized time.
1192 1192 unixtime is seconds since the epoch, and offset is the time zone's
1193 1193 number of seconds away from UTC. "%1" and "%2" in the format are
1194 1194 replaced with the offset's signed hours and its minutes, respectively."""
1195 1195 t, tz = date or makedate()
1196 1196 if "%1" in format or "%2" in format:
1197 1197 sign = (tz > 0) and "-" or "+"
1198 1198 minutes = abs(tz) / 60
1199 1199 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1200 1200 format = format.replace("%2", "%02d" % (minutes % 60))
1201 1201 s = time.strftime(format, time.gmtime(float(t) - tz))
1202 1202 return s
1203 1203
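# Illustration only (not part of the diff): how datestr() above expands the
# %1/%2 tokens for a UTC+0530 date, i.e. tz = -19800 seconds.
#   sign = '+' (tz is not > 0); minutes = abs(tz) / 60 = 330
#   '%1' -> '%c%02d' % ('+', 330 / 60) -> '+05'; '%2' -> '%02d' % (330 % 60) -> '30'
#   datestr((0, -19800), format='%1%2') -> '+0530'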
1204 1204 def shortdate(date=None):
1205 1205 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1206 1206 return datestr(date, format='%Y-%m-%d')
1207 1207
1208 1208 def strdate(string, format, defaults=[]):
1209 1209 """parse a localized time string and return a (unixtime, offset) tuple.
1210 1210 if the string cannot be parsed, ValueError is raised."""
1211 1211 def timezone(string):
1212 1212 tz = string.split()[-1]
1213 1213 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1214 1214 sign = (tz[0] == "+") and 1 or -1
1215 1215 hours = int(tz[1:3])
1216 1216 minutes = int(tz[3:5])
1217 1217 return -sign * (hours * 60 + minutes) * 60
1218 1218 if tz == "GMT" or tz == "UTC":
1219 1219 return 0
1220 1220 return None
1221 1221
1222 1222 # NOTE: unixtime = localunixtime + offset
1223 1223 offset, date = timezone(string), string
1224 1224 if offset != None:
1225 1225 date = " ".join(string.split()[:-1])
1226 1226
1227 1227 # add missing elements from defaults
1228 1228 for part in defaults:
1229 1229 found = [True for p in part if ("%"+p) in format]
1230 1230 if not found:
1231 1231 date += "@" + defaults[part]
1232 1232 format += "@%" + part[0]
1233 1233
1234 1234 timetuple = time.strptime(date, format)
1235 1235 localunixtime = int(calendar.timegm(timetuple))
1236 1236 if offset is None:
1237 1237 # local timezone
1238 1238 unixtime = int(time.mktime(timetuple))
1239 1239 offset = unixtime - localunixtime
1240 1240 else:
1241 1241 unixtime = localunixtime + offset
1242 1242 return unixtime, offset
1243 1243
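# Illustration only (not part of the diff): how strdate() above merges defaults
# into a partial date. Parsing '02/01' with format '%m/%d' appends '@'-separated
# values for each field missing from the format (year, hour, minute, second),
# e.g. date -> '02/01@09@00@00@00' and format -> '%m/%d@%y@%H@%M@%S' (the '09'
# year is only an example), before both are handed to time.strptime().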
1244 1244 def parsedate(date, formats=None, defaults=None):
1245 1245 """parse a localized date/time string and return a (unixtime, offset) tuple.
1246 1246
1247 1247 The date may be a "unixtime offset" string or in one of the specified
1248 1248 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1249 1249 """
1250 1250 if not date:
1251 1251 return 0, 0
1252 1252 if isinstance(date, tuple) and len(date) == 2:
1253 1253 return date
1254 1254 if not formats:
1255 1255 formats = defaultdateformats
1256 1256 date = date.strip()
1257 1257 try:
1258 1258 when, offset = map(int, date.split(' '))
1259 1259 except ValueError:
1260 1260 # fill out defaults
1261 1261 if not defaults:
1262 1262 defaults = {}
1263 1263 now = makedate()
1264 1264 for part in "d mb yY HI M S".split():
1265 1265 if part not in defaults:
1266 1266 if part[0] in "HMS":
1267 1267 defaults[part] = "00"
1268 1268 else:
1269 1269 defaults[part] = datestr(now, "%" + part[0])
1270 1270
1271 1271 for format in formats:
1272 1272 try:
1273 1273 when, offset = strdate(date, format, defaults)
1274 1274 except (ValueError, OverflowError):
1275 1275 pass
1276 1276 else:
1277 1277 break
1278 1278 else:
1279 1279 raise Abort(_('invalid date: %r ') % date)
1280 1280 # validate explicit (probably user-specified) date and
1281 1281 # time zone offset. values must fit in signed 32 bits for
1282 1282 # current 32-bit linux runtimes. timezones go from UTC-12
1283 1283 # to UTC+14
1284 1284 if abs(when) > 0x7fffffff:
1285 1285 raise Abort(_('date exceeds 32 bits: %d') % when)
1286 1286 if offset < -50400 or offset > 43200:
1287 1287 raise Abort(_('impossible time zone offset: %d') % offset)
1288 1288 return when, offset
1289 1289
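# Illustration only (not part of the diff): the two input forms accepted by
# parsedate() above.
#   parsedate('1150000000 14400')           # raw "unixtime offset" fast path
#   parsedate('2006-02-01 13:00:30 -0500')  # matched against defaultdateformats,
#                                           # with strdate() extracting the zone
# Both return a (unixtime, offset) tuple; a tuple passed in is returned as is.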
1290 1290 def matchdate(date):
1291 1291 """Return a function that matches a given date match specifier
1292 1292
1293 1293 Formats include:
1294 1294
1295 1295 '{date}' match a given date to the accuracy provided
1296 1296
1297 1297 '<{date}' on or before a given date
1298 1298
1299 1299 '>{date}' on or after a given date
1300 1300
1301 1301 """
1302 1302
1303 1303 def lower(date):
1304 1304 d = dict(mb="1", d="1")
1305 1305 return parsedate(date, extendeddateformats, d)[0]
1306 1306
1307 1307 def upper(date):
1308 1308 d = dict(mb="12", HI="23", M="59", S="59")
1309 1309 for days in "31 30 29".split():
1310 1310 try:
1311 1311 d["d"] = days
1312 1312 return parsedate(date, extendeddateformats, d)[0]
1313 1313 except:
1314 1314 pass
1315 1315 d["d"] = "28"
1316 1316 return parsedate(date, extendeddateformats, d)[0]
1317 1317
1318 date = date.strip()
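# ^ the fix: strip surrounding whitespace first, so a specifier such as
#   ' > 02/01 ' still reaches the '<'/'>'/'-' branches below instead of
#   falling through to the catch-all parsedate() call and aborting.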
1318 1319 if date[0] == "<":
1319 1320 when = upper(date[1:])
1320 1321 return lambda x: x <= when
1321 1322 elif date[0] == ">":
1322 1323 when = lower(date[1:])
1323 1324 return lambda x: x >= when
1324 1325 elif date[0] == "-":
1325 1326 try:
1326 1327 days = int(date[1:])
1327 1328 except ValueError:
1328 1329 raise Abort(_("invalid day spec: %s") % date[1:])
1329 1330 when = makedate()[0] - days * 3600 * 24
1330 1331 return lambda x: x >= when
1331 1332 elif " to " in date:
1332 1333 a, b = date.split(" to ")
1333 1334 start, stop = lower(a), upper(b)
1334 1335 return lambda x: x >= start and x <= stop
1335 1336 else:
1336 1337 start, stop = lower(date), upper(date)
1337 1338 return lambda x: x >= start and x <= stop
1338 1339
1339 1340 def shortuser(user):
1340 1341 """Return a short representation of a user name or email address."""
1341 1342 f = user.find('@')
1342 1343 if f >= 0:
1343 1344 user = user[:f]
1344 1345 f = user.find('<')
1345 1346 if f >= 0:
1346 1347 user = user[f+1:]
1347 1348 f = user.find(' ')
1348 1349 if f >= 0:
1349 1350 user = user[:f]
1350 1351 f = user.find('.')
1351 1352 if f >= 0:
1352 1353 user = user[:f]
1353 1354 return user
1354 1355
1355 1356 def email(author):
1356 1357 '''get email of author.'''
1357 1358 r = author.find('>')
1358 1359 if r == -1: r = None
1359 1360 return author[author.find('<')+1:r]
1360 1361
1361 1362 def ellipsis(text, maxlength=400):
1362 1363 """Trim string to at most maxlength (default: 400) characters."""
1363 1364 if len(text) <= maxlength:
1364 1365 return text
1365 1366 else:
1366 1367 return "%s..." % (text[:maxlength-3])
1367 1368
1368 1369 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1369 1370 '''yield every hg repository under path, recursively.'''
1370 1371 def errhandler(err):
1371 1372 if err.filename == path:
1372 1373 raise err
1373 1374 if followsym and hasattr(os.path, 'samestat'):
1374 1375 def _add_dir_if_not_there(dirlst, dirname):
1375 1376 match = False
1376 1377 samestat = os.path.samestat
1377 1378 dirstat = os.stat(dirname)
1378 1379 for lstdirstat in dirlst:
1379 1380 if samestat(dirstat, lstdirstat):
1380 1381 match = True
1381 1382 break
1382 1383 if not match:
1383 1384 dirlst.append(dirstat)
1384 1385 return not match
1385 1386 else:
1386 1387 followsym = False
1387 1388
1388 1389 if (seen_dirs is None) and followsym:
1389 1390 seen_dirs = []
1390 1391 _add_dir_if_not_there(seen_dirs, path)
1391 1392 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1392 1393 if '.hg' in dirs:
1393 1394 yield root # found a repository
1394 1395 qroot = os.path.join(root, '.hg', 'patches')
1395 1396 if os.path.isdir(os.path.join(qroot, '.hg')):
1396 1397 yield qroot # we have a patch queue repo here
1397 1398 if recurse:
1398 1399 # avoid recursing inside the .hg directory
1399 1400 dirs.remove('.hg')
1400 1401 else:
1401 1402 dirs[:] = [] # don't descend further
1402 1403 elif followsym:
1403 1404 newdirs = []
1404 1405 for d in dirs:
1405 1406 fname = os.path.join(root, d)
1406 1407 if _add_dir_if_not_there(seen_dirs, fname):
1407 1408 if os.path.islink(fname):
1408 1409 for hgname in walkrepos(fname, True, seen_dirs):
1409 1410 yield hgname
1410 1411 else:
1411 1412 newdirs.append(d)
1412 1413 dirs[:] = newdirs
1413 1414
1414 1415 _rcpath = None
1415 1416
1416 1417 def os_rcpath():
1417 1418 '''return default os-specific hgrc search path'''
1418 1419 path = system_rcpath()
1419 1420 path.extend(user_rcpath())
1420 1421 path = [os.path.normpath(f) for f in path]
1421 1422 return path
1422 1423
1423 1424 def rcpath():
1424 1425 '''return hgrc search path. if env var HGRCPATH is set, use it.
1425 1426 for each item in path, if directory, use files ending in .rc,
1426 1427 else use item.
1427 1428 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1428 1429 if no HGRCPATH, use default os-specific path.'''
1429 1430 global _rcpath
1430 1431 if _rcpath is None:
1431 1432 if 'HGRCPATH' in os.environ:
1432 1433 _rcpath = []
1433 1434 for p in os.environ['HGRCPATH'].split(os.pathsep):
1434 1435 if not p: continue
1435 1436 if os.path.isdir(p):
1436 1437 for f, kind in osutil.listdir(p):
1437 1438 if f.endswith('.rc'):
1438 1439 _rcpath.append(os.path.join(p, f))
1439 1440 else:
1440 1441 _rcpath.append(p)
1441 1442 else:
1442 1443 _rcpath = os_rcpath()
1443 1444 return _rcpath
1444 1445
1445 1446 def bytecount(nbytes):
1446 1447 '''return byte count formatted as readable string, with units'''
1447 1448
1448 1449 units = (
1449 1450 (100, 1<<30, _('%.0f GB')),
1450 1451 (10, 1<<30, _('%.1f GB')),
1451 1452 (1, 1<<30, _('%.2f GB')),
1452 1453 (100, 1<<20, _('%.0f MB')),
1453 1454 (10, 1<<20, _('%.1f MB')),
1454 1455 (1, 1<<20, _('%.2f MB')),
1455 1456 (100, 1<<10, _('%.0f KB')),
1456 1457 (10, 1<<10, _('%.1f KB')),
1457 1458 (1, 1<<10, _('%.2f KB')),
1458 1459 (1, 1, _('%.0f bytes')),
1459 1460 )
1460 1461
1461 1462 for multiplier, divisor, format in units:
1462 1463 if nbytes >= divisor * multiplier:
1463 1464 return format % (nbytes / float(divisor))
1464 1465 return units[-1][2] % nbytes
1465 1466
1466 1467 def drop_scheme(scheme, path):
1467 1468 sc = scheme + ':'
1468 1469 if path.startswith(sc):
1469 1470 path = path[len(sc):]
1470 1471 if path.startswith('//'):
1471 1472 path = path[2:]
1472 1473 return path
1473 1474
1474 1475 def uirepr(s):
1475 1476 # Avoid double backslash in Windows path repr()
1476 1477 return repr(s).replace('\\\\', '\\')
1477 1478
1478 1479 def termwidth():
1479 1480 if 'COLUMNS' in os.environ:
1480 1481 try:
1481 1482 return int(os.environ['COLUMNS'])
1482 1483 except ValueError:
1483 1484 pass
1484 1485 try:
1485 1486 import termios, array, fcntl
1486 1487 for dev in (sys.stdout, sys.stdin):
1487 1488 try:
1488 1489 fd = dev.fileno()
1489 1490 if not os.isatty(fd):
1490 1491 continue
1491 1492 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1492 1493 return array.array('h', arri)[1]
1493 1494 except ValueError:
1494 1495 pass
1495 1496 except ImportError:
1496 1497 pass
1497 1498 return 80
1498 1499
1499 1500 def iterlines(iterator):
1500 1501 for chunk in iterator:
1501 1502 for line in chunk.splitlines():
1502 1503 yield line
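A rough usage sketch of the patched matchdate(), tying the specifier forms from its docstring to the new stripping behaviour (import path and sample dates are illustrative, not part of the change):

    from mercurial import util

    when, offset = util.parsedate('2006-04-15 13:30 +0200')

    util.matchdate('2006-04-15')(when)      # same day, to the accuracy given
    util.matchdate('>2006-02-01')(when)     # on or after
    util.matchdate('<2006-12-31')(when)     # on or before
    util.matchdate(' > 2006-02-01 ')(when)  # now equivalent to the '>' form above
    util.matchdate('-30')(when)             # within the last 30 days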
@@ -1,45 +1,83 b''
1 1 #!/bin/sh
2 2
3 3 # This runs with TZ="GMT"
4 4 hg init
5 5 echo "test-parse-date" > a
6 6 hg add a
7 7 hg ci -d "2006-02-01 13:00:30" -m "rev 0"
8 8 echo "hi!" >> a
9 9 hg ci -d "2006-02-01 13:00:30 -0500" -m "rev 1"
10 10 hg tag -d "2006-04-15 13:30" "Hi"
11 11 hg backout --merge -d "2006-04-15 13:30 +0200" -m "rev 3" 1
12 12 hg ci -d "1150000000 14400" -m "rev 4 (merge)"
13 13 echo "fail" >> a
14 14 hg ci -d "should fail" -m "fail"
15 15 hg ci -d "100000000000000000 1400" -m "fail"
16 16 hg ci -d "100000 1400000" -m "fail"
17 17
18 18 # Check with local timezone other than GMT and with DST
19 19 TZ="PST+8PDT"
20 20 export TZ
21 21 # PST=UTC-8 / PDT=UTC-7
22 22 hg debugrebuildstate
23 23 echo "a" > a
24 24 hg ci -d "2006-07-15 13:30" -m "summer@UTC-7"
25 25 hg debugrebuildstate
26 26 echo "b" > a
27 27 hg ci -d "2006-07-15 13:30 +0500" -m "summer@UTC+5"
28 28 hg debugrebuildstate
29 29 echo "c" > a
30 30 hg ci -d "2006-01-15 13:30" -m "winter@UTC-8"
31 31 hg debugrebuildstate
32 32 echo "d" > a
33 33 hg ci -d "2006-01-15 13:30 +0500" -m "winter@UTC+5"
34 34 hg log --template '{date|date}\n'
35 35
36 36 # Test issue1014 (fractional timezones)
37 37 hg debugdate "1000000000 -16200" # 0430
38 38 hg debugdate "1000000000 -15300" # 0415
39 39 hg debugdate "1000000000 -14400" # 0400
40 40 hg debugdate "1000000000 0" # GMT
41 41 hg debugdate "1000000000 14400" # -0400
42 42 hg debugdate "1000000000 15300" # -0415
43 43 hg debugdate "1000000000 16200" # -0430
44 44 hg debugdate "Sat Sep 08 21:16:40 2001 +0430"
45 45 hg debugdate "Sat Sep 08 21:16:40 2001 -0430"
46
47 # Test date formats with '>' or '<' accompanied by space characters
48 hg log -d '>' --template '{date|date}\n'
49 hg log -d '<' --template '{date|date}\n'
50
51 hg log -d ' >' --template '{date|date}\n'
52 hg log -d ' <' --template '{date|date}\n'
53
54 hg log -d '> ' --template '{date|date}\n'
55 hg log -d '< ' --template '{date|date}\n'
56
57 hg log -d ' > ' --template '{date|date}\n'
58 hg log -d ' < ' --template '{date|date}\n'
59
60
61 hg log -d '>02/01' --template '{date|date}\n'
62 hg log -d '<02/01' --template '{date|date}\n'
63
64 hg log -d ' >02/01' --template '{date|date}\n'
65 hg log -d ' <02/01' --template '{date|date}\n'
66
67 hg log -d '> 02/01' --template '{date|date}\n'
68 hg log -d '< 02/01' --template '{date|date}\n'
69
70 hg log -d ' > 02/01' --template '{date|date}\n'
71 hg log -d ' < 02/01' --template '{date|date}\n'
72
73 hg log -d '>02/01 ' --template '{date|date}\n'
74 hg log -d '<02/01 ' --template '{date|date}\n'
75
76 hg log -d ' >02/01 ' --template '{date|date}\n'
77 hg log -d ' <02/01 ' --template '{date|date}\n'
78
79 hg log -d '> 02/01 ' --template '{date|date}\n'
80 hg log -d '< 02/01 ' --template '{date|date}\n'
81
82 hg log -d ' > 02/01 ' --template '{date|date}\n'
83 hg log -d ' < 02/01 ' --template '{date|date}\n'
\ No newline at end of file
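All of the padded variants exercised above reduce to the same specifier once the new strip is applied (parsedate() already strips its own argument); a trivial normalization check in plain Python:

    for spec in ['>02/01', ' >02/01', '> 02/01 ', ' > 02/01 ']:
        assert spec.strip().startswith('>')   # all hit matchdate's '>' branch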
@@ -1,36 +1,144 b''
1 1 reverting a
2 2 created new head
3 3 changeset 3:107ce1ee2b43 backs out changeset 1:25a1420a55f8
4 4 merging with changeset 3:107ce1ee2b43
5 5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
6 6 (branch merge, don't forget to commit)
7 7 abort: invalid date: 'should fail'
8 8 abort: date exceeds 32 bits: 100000000000000000
9 9 abort: impossible time zone offset: 1400000
10 10 Sun Jan 15 13:30:00 2006 +0500
11 11 Sun Jan 15 13:30:00 2006 -0800
12 12 Sat Jul 15 13:30:00 2006 +0500
13 13 Sat Jul 15 13:30:00 2006 -0700
14 14 Sun Jun 11 00:26:40 2006 -0400
15 15 Sat Apr 15 13:30:00 2006 +0200
16 16 Sat Apr 15 13:30:00 2006 +0000
17 17 Wed Feb 01 13:00:30 2006 -0500
18 18 Wed Feb 01 13:00:30 2006 +0000
19 19 internal: 1000000000 -16200
20 20 standard: Sun Sep 09 06:16:40 2001 +0430
21 21 internal: 1000000000 -15300
22 22 standard: Sun Sep 09 06:01:40 2001 +0415
23 23 internal: 1000000000 -14400
24 24 standard: Sun Sep 09 05:46:40 2001 +0400
25 25 internal: 1000000000 0
26 26 standard: Sun Sep 09 01:46:40 2001 +0000
27 27 internal: 1000000000 14400
28 28 standard: Sat Sep 08 21:46:40 2001 -0400
29 29 internal: 1000000000 15300
30 30 standard: Sat Sep 08 21:31:40 2001 -0415
31 31 internal: 1000000000 16200
32 32 standard: Sat Sep 08 21:16:40 2001 -0430
33 33 internal: 999967600 -16200
34 34 standard: Sat Sep 08 21:16:40 2001 +0430
35 35 internal: 1000000000 16200
36 36 standard: Sat Sep 08 21:16:40 2001 -0430
37 Sun Jan 15 13:30:00 2006 +0500
38 Sun Jan 15 13:30:00 2006 -0800
39 Sat Jul 15 13:30:00 2006 +0500
40 Sat Jul 15 13:30:00 2006 -0700
41 Sun Jun 11 00:26:40 2006 -0400
42 Sat Apr 15 13:30:00 2006 +0200
43 Sat Apr 15 13:30:00 2006 +0000
44 Wed Feb 01 13:00:30 2006 -0500
45 Wed Feb 01 13:00:30 2006 +0000
46 Sun Jan 15 13:30:00 2006 +0500
47 Sun Jan 15 13:30:00 2006 -0800
48 Sat Jul 15 13:30:00 2006 +0500
49 Sat Jul 15 13:30:00 2006 -0700
50 Sun Jun 11 00:26:40 2006 -0400
51 Sat Apr 15 13:30:00 2006 +0200
52 Sat Apr 15 13:30:00 2006 +0000
53 Wed Feb 01 13:00:30 2006 -0500
54 Wed Feb 01 13:00:30 2006 +0000
55 Sun Jan 15 13:30:00 2006 +0500
56 Sun Jan 15 13:30:00 2006 -0800
57 Sat Jul 15 13:30:00 2006 +0500
58 Sat Jul 15 13:30:00 2006 -0700
59 Sun Jun 11 00:26:40 2006 -0400
60 Sat Apr 15 13:30:00 2006 +0200
61 Sat Apr 15 13:30:00 2006 +0000
62 Wed Feb 01 13:00:30 2006 -0500
63 Wed Feb 01 13:00:30 2006 +0000
64 Sun Jan 15 13:30:00 2006 +0500
65 Sun Jan 15 13:30:00 2006 -0800
66 Sat Jul 15 13:30:00 2006 +0500
67 Sat Jul 15 13:30:00 2006 -0700
68 Sun Jun 11 00:26:40 2006 -0400
69 Sat Apr 15 13:30:00 2006 +0200
70 Sat Apr 15 13:30:00 2006 +0000
71 Wed Feb 01 13:00:30 2006 -0500
72 Wed Feb 01 13:00:30 2006 +0000
73 Sun Jan 15 13:30:00 2006 +0500
74 Sun Jan 15 13:30:00 2006 -0800
75 Sat Jul 15 13:30:00 2006 +0500
76 Sat Jul 15 13:30:00 2006 -0700
77 Sun Jun 11 00:26:40 2006 -0400
78 Sat Apr 15 13:30:00 2006 +0200
79 Sat Apr 15 13:30:00 2006 +0000
80 Wed Feb 01 13:00:30 2006 -0500
81 Wed Feb 01 13:00:30 2006 +0000
82 Sun Jan 15 13:30:00 2006 +0500
83 Sun Jan 15 13:30:00 2006 -0800
84 Sat Jul 15 13:30:00 2006 +0500
85 Sat Jul 15 13:30:00 2006 -0700
86 Sun Jun 11 00:26:40 2006 -0400
87 Sat Apr 15 13:30:00 2006 +0200
88 Sat Apr 15 13:30:00 2006 +0000
89 Wed Feb 01 13:00:30 2006 -0500
90 Wed Feb 01 13:00:30 2006 +0000
91 Sun Jan 15 13:30:00 2006 +0500
92 Sun Jan 15 13:30:00 2006 -0800
93 Sat Jul 15 13:30:00 2006 +0500
94 Sat Jul 15 13:30:00 2006 -0700
95 Sun Jun 11 00:26:40 2006 -0400
96 Sat Apr 15 13:30:00 2006 +0200
97 Sat Apr 15 13:30:00 2006 +0000
98 Wed Feb 01 13:00:30 2006 -0500
99 Wed Feb 01 13:00:30 2006 +0000
100 Sun Jan 15 13:30:00 2006 +0500
101 Sun Jan 15 13:30:00 2006 -0800
102 Sat Jul 15 13:30:00 2006 +0500
103 Sat Jul 15 13:30:00 2006 -0700
104 Sun Jun 11 00:26:40 2006 -0400
105 Sat Apr 15 13:30:00 2006 +0200
106 Sat Apr 15 13:30:00 2006 +0000
107 Wed Feb 01 13:00:30 2006 -0500
108 Wed Feb 01 13:00:30 2006 +0000
109 Sun Jan 15 13:30:00 2006 +0500
110 Sun Jan 15 13:30:00 2006 -0800
111 Sat Jul 15 13:30:00 2006 +0500
112 Sat Jul 15 13:30:00 2006 -0700
113 Sun Jun 11 00:26:40 2006 -0400
114 Sat Apr 15 13:30:00 2006 +0200
115 Sat Apr 15 13:30:00 2006 +0000
116 Wed Feb 01 13:00:30 2006 -0500
117 Wed Feb 01 13:00:30 2006 +0000
118 Sun Jan 15 13:30:00 2006 +0500
119 Sun Jan 15 13:30:00 2006 -0800
120 Sat Jul 15 13:30:00 2006 +0500
121 Sat Jul 15 13:30:00 2006 -0700
122 Sun Jun 11 00:26:40 2006 -0400
123 Sat Apr 15 13:30:00 2006 +0200
124 Sat Apr 15 13:30:00 2006 +0000
125 Wed Feb 01 13:00:30 2006 -0500
126 Wed Feb 01 13:00:30 2006 +0000
127 Sun Jan 15 13:30:00 2006 +0500
128 Sun Jan 15 13:30:00 2006 -0800
129 Sat Jul 15 13:30:00 2006 +0500
130 Sat Jul 15 13:30:00 2006 -0700
131 Sun Jun 11 00:26:40 2006 -0400
132 Sat Apr 15 13:30:00 2006 +0200
133 Sat Apr 15 13:30:00 2006 +0000
134 Wed Feb 01 13:00:30 2006 -0500
135 Wed Feb 01 13:00:30 2006 +0000
136 Sun Jan 15 13:30:00 2006 +0500
137 Sun Jan 15 13:30:00 2006 -0800
138 Sat Jul 15 13:30:00 2006 +0500
139 Sat Jul 15 13:30:00 2006 -0700
140 Sun Jun 11 00:26:40 2006 -0400
141 Sat Apr 15 13:30:00 2006 +0200
142 Sat Apr 15 13:30:00 2006 +0000
143 Wed Feb 01 13:00:30 2006 -0500
144 Wed Feb 01 13:00:30 2006 +0000