util: initialize md5 and sha1 without using extra global variables...
Martin Geisler
r8281:3e1e499d default
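
The new md5() and sha1() are self-replacing wrappers: on the first call they import whichever hash module is available (hashlib on Python 2.5+, the older md5/sha modules otherwise) and then rebind the module-level name to the real constructor, so the _md5/_sha1 globals and the "is None" check are no longer needed and later calls go straight to the constructor. A minimal standalone sketch of the same idiom, assuming nothing from util.py (the digest wrapper name below is hypothetical, for illustration only):

# Sketch of the "replace yourself on first call" idiom used by this changeset.
# 'digest' is a hypothetical stand-in name; util.py applies this to md5/sha1.
def digest(data):
    try:
        import hashlib                 # present on Python 2.5 and later
        _impl = hashlib.sha1
    except ImportError:
        import sha                     # fallback for older Pythons
        _impl = sha.sha
    global digest
    digest = _impl                     # rebind the module-level name
    return _impl(data)                 # still serve this first call

print(digest(b'abc').hexdigest())      # first call: imports, then rebinds
print(digest(b'abc').hexdigest())      # later calls hit hashlib.sha1 directly

After the first call, digest is no longer the wrapper at all; the module dictionary points directly at the constructor, which is what lets the changeset drop the extra globals.
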
@@ -1,1485 +1,1483 @@
# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from i18n import _
import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
import os, stat, threading, time, calendar, glob, osutil, random
import imp

# Python compatibility

-_md5 = None
 def md5(s):
-    global _md5
-    if _md5 is None:
-        try:
-            import hashlib
-            _md5 = hashlib.md5
-        except ImportError:
-            import md5
-            _md5 = md5.md5
+    try:
+        import hashlib
+        _md5 = hashlib.md5
+    except ImportError:
+        import md5
+        _md5 = md5.md5
+    global md5
+    md5 = _md5
     return _md5(s)
 
-_sha1 = None
 def sha1(s):
-    global _sha1
-    if _sha1 is None:
-        try:
-            import hashlib
-            _sha1 = hashlib.sha1
-        except ImportError:
-            import sha
-            _sha1 = sha.sha
+    try:
+        import hashlib
+        _sha1 = hashlib.sha1
+    except ImportError:
+        import sha
+        _sha1 = sha.sha
+    global sha1
+    sha1 = _sha1
     return _sha1(s)

import subprocess
closefds = os.name == 'posix'
def popen2(cmd, mode='t', bufsize=-1):
    p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    return p.stdin, p.stdout
def popen3(cmd, mode='t', bufsize=-1):
    p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    return p.stdin, p.stdout, p.stderr
def Popen3(cmd, capturestderr=False, bufsize=-1):
    stderr = capturestderr and subprocess.PIPE or None
    p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=stderr)
    p.fromchild = p.stdout
    p.tochild = p.stdin
    p.childerr = p.stderr
    return p

def version():
    """Return version information if available."""
    try:
        import __version__
        return __version__.version
    except ImportError:
        return 'unknown'

# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%SP',
    '%H:%M',
    '%I:%M%p',
    )

extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )

def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keyword args
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f

class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        setattr(obj, self.name, result)
        return result

def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    (pin, pout) = popen2(cmd, 'b')
    def writer():
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            if inst.errno != errno.EPIPE:
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f

def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass

filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)

def binary(s):
    """return true if a string is binary data"""
    return bool(s and '\0' in s)

def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield ''.join(buf)

Abort = error.Abort

def always(fn): return True
def never(fn): return False

def patkind(name, default):
    """Split a string into an optional pattern kind prefix and the
    actual pattern."""
    for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
        if name.startswith(prefix + ':'): return name.split(':', 1)
    return default, name

def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    i, n = 0, len(pat)
    res = ''
    group = 0
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                i += 1
                res += '.*'
            else:
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            group += 1
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group -= 1
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail

_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}

def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1: return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'

def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    if root == os.sep:
        rootsep = os.sep
    elif endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    audit_path = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)

def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            if len(pat) > 20000:
                raise OverflowError()
            return re.compile(pat).match
        except OverflowError:
            # We're using a Python with a tiny regex engine and we
            # made it explode, so we'll divide the pattern list in two
            # until it works
            l = len(pats)
            if l < 2:
                raise
            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
            return lambda s: a(s) or b(s)
        except re.error:
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = never
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)

_hgexecutable = None

def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (hasattr(sys, "frozen") or # new py2exe
            hasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        if hg:
            set_hgexecutable(hg)
        elif main_is_frozen():
            set_hgexecutable(sys.executable)
        else:
            set_hgexecutable(find_exe('hg') or 'hg')
    return _hgexecutable

def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path

def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        os.environ['HG'] = hgexecutable()
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if sys.platform == 'OpenVMS' and rc & 1:
            rc = 0
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)

def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check

# os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    except:
        return False
    return True

def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError, err: # FIXME: check err (EEXIST ?)

        # On windows, rename to existing file is not allowed, so we
        # must delete destination first. But if a file is open, unlink
        # schedules it for delete but does not delete it. Rename
        # happens immediately even for open files, so we rename
        # destination to a temporary name, then delete that. Then
        # rename is safe to do.
        # The temporary name is chosen at random to avoid the situation
        # where a file is left lying around from a previous aborted run.
        # The usual race condition this introduces can't be avoided as
        # we need the name to rename into, and not the file itself. Due
        # to the nature of the operation however, any races will at worst
        # lead to the rename failing and the current operation aborting.

        def tempname(prefix):
            for tries in xrange(10):
                temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
                if not os.path.exists(temp):
                    return temp
            raise IOError, (errno.EEXIST, "No usable temporary filename found")

        temp = tempname(dst)
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass

def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copystat(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))

def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            copyfiles(srcname, dstname, hardlink)
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)

class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0]
            or parts[0].lower() in ('.hg', '.hg.', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        if '.hg' in path.lower():
            lparts = [p.lower() for p in parts]
            for p in '.hg', '.hg.':
                if p in lparts[1:]:
                    pos = lparts.index(p)
                    base = os.path.join(*parts[:pos])
                    raise Abort(_('path %r is inside repo %r') % (path, base))
        def check(prefix):
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored for patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        parts.pop()
        prefixes = []
        for n in range(len(parts)):
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))

def lookup_reg(key, name=None, scope=None):
    return None

if os.name == 'nt':
    from windows import *
    def expand_glob(pats):
        '''On Windows, expand the implicit globs in a list of patterns'''
        ret = []
        for p in pats:
            kind, name = patkind(p, None)
            if kind is None:
                globbed = glob.glob(name)
                if globbed:
                    ret.extend(globbed)
                    continue
            # if we couldn't expand the glob, just keep it around
            ret.append(p)
        return ret
else:
    from posix import *

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError, why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    return posixfile(pathname).read()

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            return False
        return True
    except:
        return True

_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).
    '''
    # If name is absolute, make it relative
    if name.lower().startswith(root.lower()):
        l = len(root)
        if name[l] == os.sep or name[l] == os.altsep:
            l = l + 1
        name = name[l:]

    if not os.path.exists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    seps.replace('\\','\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normcase(os.path.normpath(root))
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        lpart = part.lower()
        for n in contents:
            if n.lower() == lpart:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, lpart)

    return ''.join(result)

def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)

def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        return False

def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    return os.name == 'nt'

def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)

def gui():
    '''Are we running in a GUI?'''
    return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name.

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        try: os.unlink(temp)
        except: pass
        raise
    return temp

class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)

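# A minimal usage sketch (not part of util.py) of the write-to-temp-then-
# rename pattern the two helpers above provide; 'some/file' is a
# hypothetical path:
#
#   >>> f = atomictempfile('some/file', 'w', createmode=None)
#   >>> f.write('new contents')
#   >>> f.rename()   # the temporary copy replaces 'some/file' in one step
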
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    def __getattr__(self, name):
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

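# Sketch of typical opener use (illustrative only; the base directory and
# relative path are hypothetical). Calling the instance opens a path
# relative to base, creating missing directories and honouring createmode:
#
#   >>> o = opener('/path/to/repo/.hg')
#   >>> f = o('store/somefile', 'w', atomictemp=True)
#   >>> f.write('data')
#   >>> f.rename()   # atomictempfile handle: rename() publishes the write
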
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

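# Illustrative example (not part of the module): chunkbuffer turns an
# iterator of unevenly sized strings into a source of exact byte counts:
#
#   >>> cb = chunkbuffer(iter(['abc', 'defgh']))
#   >>> cb.read(4)
#   'abcd'
#   >>> cb.read(10)   # returns whatever is left once the iterator runs dry
#   'efgh'
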
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file, size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

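# Brief sketch, assuming already-open file objects 'fp' and 'out': copy at
# most the first megabyte without reading the whole file into memory:
#
#   >>> for chunk in filechunkiter(fp, limit=1024 * 1024):
#   ...     out.write(chunk)
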
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. '%1' and '%2' in the format are
    expanded to the offset's signed hours and its minutes; leave them
    out to omit the time zone from the string."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

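# Example of the (unixtime, offset) convention (output shown for the C
# locale): a zero offset renders as "+0000" via the %1/%2 expansion:
#
#   >>> datestr((0, 0))
#   'Thu Jan 01 00:00:00 1970 +0000'
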
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

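# Two illustrative calls: a plain "unixtime offset" string is passed
# through after validation, while a textual date is matched against
# defaultdateformats (the epoch value of the second call depends on the
# local time zone, so it is not shown here):
#
#   >>> parsedate('100 0')
#   (100, 0)
#   >>> when, offset = parsedate('2008-06-01 13:00:30')
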
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

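# Usage sketch (not part of the module): build a predicate over changeset
# timestamps, e.g. everything from roughly the last 30 days:
#
#   >>> m = matchdate('-30')
#   >>> m(time.time())
#   True
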
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

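# Examples for the two author helpers above, using a hypothetical author
# string:
#
#   >>> shortuser('Foo Bar <foo.bar@example.com>')
#   'foo'
#   >>> email('Foo Bar <foo.bar@example.com>')
#   'foo.bar@example.com'
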
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

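# Usage sketch (the path is hypothetical): list every repository below a
# directory, following symlinks and descending into nested repositories:
#
#   >>> for repo in walkrepos('/srv/hg', followsym=True, recurse=True):
#   ...     print repo
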
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

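# The table above picks the first row whose threshold fits; for example,
# with the default (untranslated) unit strings:
#
#   >>> bytecount(500)
#   '500 bytes'
#   >>> bytecount(2048)
#   '2.00 KB'
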
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line