util: remove warnings when importing md5 and sha
Sune Foldager
r8295:1ea7e7d9 default
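The hunk below rewrites the Python 2.4 fallback imports inside util.md5() and util.sha1(). In the old form, 'import md5' bound the module to the very name that the following 'global md5' statement declares, and assigning to a name before its 'global' declaration is something CPython 2.x reports with a SyntaxWarning; binding the constructor straight to a private name avoids that. That reading of the one-line commit message is an assumption, as is the stand-alone sketch below, which mirrors the new code rather than reproducing the file verbatim.

# Minimal sketch of the lazy hash-constructor shim the diff moves to.
# Assumption: illustrative only; the names (md5, _md5) follow the hunk.
def md5(s):
    try:
        import hashlib                   # available on Python >= 2.5
        _md5 = hashlib.md5
    except ImportError:
        # Only _md5 is bound here, so the global-declared name 'md5'
        # is never assigned before the 'global' statement below
        # (the old 'import md5; _md5 = md5.md5' form did exactly that).
        from md5 import md5 as _md5      # Python 2.4 fallback
    global md5
    md5 = _md5       # rebind the module-level name; later calls skip the try/except
    return _md5(s)

Usage: the first md5(data) call resolves the best available constructor and rebinds the module-level name, so subsequent calls go straight to hashlib.md5 (on a modern Python, data must be bytes).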
@@ -1,1483 +1,1481 @@
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
18 import os, stat, threading, time, calendar, glob, osutil, random
18 import os, stat, threading, time, calendar, glob, osutil, random
19 import imp
19 import imp
20
20
21 # Python compatibility
21 # Python compatibility
22
22
23 def md5(s):
23 def md5(s):
24 try:
24 try:
25 import hashlib
25 import hashlib
26 _md5 = hashlib.md5
26 _md5 = hashlib.md5
27 except ImportError:
27 except ImportError:
28 import md5
28 from md5 import md5 as _md5
29 _md5 = md5.md5
30 global md5
29 global md5
31 md5 = _md5
30 md5 = _md5
32 return _md5(s)
31 return _md5(s)
33
32
34 def sha1(s):
33 def sha1(s):
35 try:
34 try:
36 import hashlib
35 import hashlib
37 _sha1 = hashlib.sha1
36 _sha1 = hashlib.sha1
38 except ImportError:
37 except ImportError:
39 import sha
38 from sha import sha as _sha1
40 _sha1 = sha.sha
41 global sha1
39 global sha1
42 sha1 = _sha1
40 sha1 = _sha1
43 return _sha1(s)
41 return _sha1(s)
44
42
45 import subprocess
43 import subprocess
46 closefds = os.name == 'posix'
44 closefds = os.name == 'posix'
47 def popen2(cmd, mode='t', bufsize=-1):
45 def popen2(cmd, mode='t', bufsize=-1):
48 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
46 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
49 close_fds=closefds,
47 close_fds=closefds,
50 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
48 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
51 return p.stdin, p.stdout
49 return p.stdin, p.stdout
52 def popen3(cmd, mode='t', bufsize=-1):
50 def popen3(cmd, mode='t', bufsize=-1):
53 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
51 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
54 close_fds=closefds,
52 close_fds=closefds,
55 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
53 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
56 stderr=subprocess.PIPE)
54 stderr=subprocess.PIPE)
57 return p.stdin, p.stdout, p.stderr
55 return p.stdin, p.stdout, p.stderr
58 def Popen3(cmd, capturestderr=False, bufsize=-1):
56 def Popen3(cmd, capturestderr=False, bufsize=-1):
59 stderr = capturestderr and subprocess.PIPE or None
57 stderr = capturestderr and subprocess.PIPE or None
60 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
58 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
61 close_fds=closefds,
59 close_fds=closefds,
62 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
60 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
63 stderr=stderr)
61 stderr=stderr)
64 p.fromchild = p.stdout
62 p.fromchild = p.stdout
65 p.tochild = p.stdin
63 p.tochild = p.stdin
66 p.childerr = p.stderr
64 p.childerr = p.stderr
67 return p
65 return p
68
66
69 def version():
67 def version():
70 """Return version information if available."""
68 """Return version information if available."""
71 try:
69 try:
72 import __version__
70 import __version__
73 return __version__.version
71 return __version__.version
74 except ImportError:
72 except ImportError:
75 return 'unknown'
73 return 'unknown'
76
74
77 # used by parsedate
75 # used by parsedate
78 defaultdateformats = (
76 defaultdateformats = (
79 '%Y-%m-%d %H:%M:%S',
77 '%Y-%m-%d %H:%M:%S',
80 '%Y-%m-%d %I:%M:%S%p',
78 '%Y-%m-%d %I:%M:%S%p',
81 '%Y-%m-%d %H:%M',
79 '%Y-%m-%d %H:%M',
82 '%Y-%m-%d %I:%M%p',
80 '%Y-%m-%d %I:%M%p',
83 '%Y-%m-%d',
81 '%Y-%m-%d',
84 '%m-%d',
82 '%m-%d',
85 '%m/%d',
83 '%m/%d',
86 '%m/%d/%y',
84 '%m/%d/%y',
87 '%m/%d/%Y',
85 '%m/%d/%Y',
88 '%a %b %d %H:%M:%S %Y',
86 '%a %b %d %H:%M:%S %Y',
89 '%a %b %d %I:%M:%S%p %Y',
87 '%a %b %d %I:%M:%S%p %Y',
90 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
88 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
91 '%b %d %H:%M:%S %Y',
89 '%b %d %H:%M:%S %Y',
92 '%b %d %I:%M:%S%p %Y',
90 '%b %d %I:%M:%S%p %Y',
93 '%b %d %H:%M:%S',
91 '%b %d %H:%M:%S',
94 '%b %d %I:%M:%S%p',
92 '%b %d %I:%M:%S%p',
95 '%b %d %H:%M',
93 '%b %d %H:%M',
96 '%b %d %I:%M%p',
94 '%b %d %I:%M%p',
97 '%b %d %Y',
95 '%b %d %Y',
98 '%b %d',
96 '%b %d',
99 '%H:%M:%S',
97 '%H:%M:%S',
100 '%I:%M:%SP',
98 '%I:%M:%SP',
101 '%H:%M',
99 '%H:%M',
102 '%I:%M%p',
100 '%I:%M%p',
103 )
101 )
104
102
105 extendeddateformats = defaultdateformats + (
103 extendeddateformats = defaultdateformats + (
106 "%Y",
104 "%Y",
107 "%Y-%m",
105 "%Y-%m",
108 "%b",
106 "%b",
109 "%b %Y",
107 "%b %Y",
110 )
108 )
111
109
112 def cachefunc(func):
110 def cachefunc(func):
113 '''cache the result of function calls'''
111 '''cache the result of function calls'''
114 # XXX doesn't handle keywords args
112 # XXX doesn't handle keywords args
115 cache = {}
113 cache = {}
116 if func.func_code.co_argcount == 1:
114 if func.func_code.co_argcount == 1:
117 # we gain a small amount of time because
115 # we gain a small amount of time because
118 # we don't need to pack/unpack the list
116 # we don't need to pack/unpack the list
119 def f(arg):
117 def f(arg):
120 if arg not in cache:
118 if arg not in cache:
121 cache[arg] = func(arg)
119 cache[arg] = func(arg)
122 return cache[arg]
120 return cache[arg]
123 else:
121 else:
124 def f(*args):
122 def f(*args):
125 if args not in cache:
123 if args not in cache:
126 cache[args] = func(*args)
124 cache[args] = func(*args)
127 return cache[args]
125 return cache[args]
128
126
129 return f
127 return f
130
128
131 class propertycache(object):
129 class propertycache(object):
132 def __init__(self, func):
130 def __init__(self, func):
133 self.func = func
131 self.func = func
134 self.name = func.__name__
132 self.name = func.__name__
135 def __get__(self, obj, type=None):
133 def __get__(self, obj, type=None):
136 result = self.func(obj)
134 result = self.func(obj)
137 setattr(obj, self.name, result)
135 setattr(obj, self.name, result)
138 return result
136 return result
139
137
140 def pipefilter(s, cmd):
138 def pipefilter(s, cmd):
141 '''filter string S through command CMD, returning its output'''
139 '''filter string S through command CMD, returning its output'''
142 (pin, pout) = popen2(cmd, 'b')
140 (pin, pout) = popen2(cmd, 'b')
143 def writer():
141 def writer():
144 try:
142 try:
145 pin.write(s)
143 pin.write(s)
146 pin.close()
144 pin.close()
147 except IOError, inst:
145 except IOError, inst:
148 if inst.errno != errno.EPIPE:
146 if inst.errno != errno.EPIPE:
149 raise
147 raise
150
148
151 # we should use select instead on UNIX, but this will work on most
149 # we should use select instead on UNIX, but this will work on most
152 # systems, including Windows
150 # systems, including Windows
153 w = threading.Thread(target=writer)
151 w = threading.Thread(target=writer)
154 w.start()
152 w.start()
155 f = pout.read()
153 f = pout.read()
156 pout.close()
154 pout.close()
157 w.join()
155 w.join()
158 return f
156 return f
159
157
160 def tempfilter(s, cmd):
158 def tempfilter(s, cmd):
161 '''filter string S through a pair of temporary files with CMD.
159 '''filter string S through a pair of temporary files with CMD.
162 CMD is used as a template to create the real command to be run,
160 CMD is used as a template to create the real command to be run,
163 with the strings INFILE and OUTFILE replaced by the real names of
161 with the strings INFILE and OUTFILE replaced by the real names of
164 the temporary files generated.'''
162 the temporary files generated.'''
165 inname, outname = None, None
163 inname, outname = None, None
166 try:
164 try:
167 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
165 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 fp = os.fdopen(infd, 'wb')
166 fp = os.fdopen(infd, 'wb')
169 fp.write(s)
167 fp.write(s)
170 fp.close()
168 fp.close()
171 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
169 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 os.close(outfd)
170 os.close(outfd)
173 cmd = cmd.replace('INFILE', inname)
171 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('OUTFILE', outname)
172 cmd = cmd.replace('OUTFILE', outname)
175 code = os.system(cmd)
173 code = os.system(cmd)
176 if sys.platform == 'OpenVMS' and code & 1:
174 if sys.platform == 'OpenVMS' and code & 1:
177 code = 0
175 code = 0
178 if code: raise Abort(_("command '%s' failed: %s") %
176 if code: raise Abort(_("command '%s' failed: %s") %
179 (cmd, explain_exit(code)))
177 (cmd, explain_exit(code)))
180 return open(outname, 'rb').read()
178 return open(outname, 'rb').read()
181 finally:
179 finally:
182 try:
180 try:
183 if inname: os.unlink(inname)
181 if inname: os.unlink(inname)
184 except: pass
182 except: pass
185 try:
183 try:
186 if outname: os.unlink(outname)
184 if outname: os.unlink(outname)
187 except: pass
185 except: pass
188
186
189 filtertable = {
187 filtertable = {
190 'tempfile:': tempfilter,
188 'tempfile:': tempfilter,
191 'pipe:': pipefilter,
189 'pipe:': pipefilter,
192 }
190 }
193
191
194 def filter(s, cmd):
192 def filter(s, cmd):
195 "filter a string through a command that transforms its input to its output"
193 "filter a string through a command that transforms its input to its output"
196 for name, fn in filtertable.iteritems():
194 for name, fn in filtertable.iteritems():
197 if cmd.startswith(name):
195 if cmd.startswith(name):
198 return fn(s, cmd[len(name):].lstrip())
196 return fn(s, cmd[len(name):].lstrip())
199 return pipefilter(s, cmd)
197 return pipefilter(s, cmd)
200
198
201 def binary(s):
199 def binary(s):
202 """return true if a string is binary data"""
200 """return true if a string is binary data"""
203 return bool(s and '\0' in s)
201 return bool(s and '\0' in s)
204
202
205 def increasingchunks(source, min=1024, max=65536):
203 def increasingchunks(source, min=1024, max=65536):
206 '''return no less than min bytes per chunk while data remains,
204 '''return no less than min bytes per chunk while data remains,
207 doubling min after each chunk until it reaches max'''
205 doubling min after each chunk until it reaches max'''
208 def log2(x):
206 def log2(x):
209 if not x:
207 if not x:
210 return 0
208 return 0
211 i = 0
209 i = 0
212 while x:
210 while x:
213 x >>= 1
211 x >>= 1
214 i += 1
212 i += 1
215 return i - 1
213 return i - 1
216
214
217 buf = []
215 buf = []
218 blen = 0
216 blen = 0
219 for chunk in source:
217 for chunk in source:
220 buf.append(chunk)
218 buf.append(chunk)
221 blen += len(chunk)
219 blen += len(chunk)
222 if blen >= min:
220 if blen >= min:
223 if min < max:
221 if min < max:
224 min = min << 1
222 min = min << 1
225 nmin = 1 << log2(blen)
223 nmin = 1 << log2(blen)
226 if nmin > min:
224 if nmin > min:
227 min = nmin
225 min = nmin
228 if min > max:
226 if min > max:
229 min = max
227 min = max
230 yield ''.join(buf)
228 yield ''.join(buf)
231 blen = 0
229 blen = 0
232 buf = []
230 buf = []
233 if buf:
231 if buf:
234 yield ''.join(buf)
232 yield ''.join(buf)
235
233
236 Abort = error.Abort
234 Abort = error.Abort
237
235
238 def always(fn): return True
236 def always(fn): return True
239 def never(fn): return False
237 def never(fn): return False
240
238
241 def patkind(name, default):
239 def patkind(name, default):
242 """Split a string into an optional pattern kind prefix and the
240 """Split a string into an optional pattern kind prefix and the
243 actual pattern."""
241 actual pattern."""
244 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
242 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
245 if name.startswith(prefix + ':'): return name.split(':', 1)
243 if name.startswith(prefix + ':'): return name.split(':', 1)
246 return default, name
244 return default, name
247
245
248 def globre(pat, head='^', tail='$'):
246 def globre(pat, head='^', tail='$'):
249 "convert a glob pattern into a regexp"
247 "convert a glob pattern into a regexp"
250 i, n = 0, len(pat)
248 i, n = 0, len(pat)
251 res = ''
249 res = ''
252 group = 0
250 group = 0
253 def peek(): return i < n and pat[i]
251 def peek(): return i < n and pat[i]
254 while i < n:
252 while i < n:
255 c = pat[i]
253 c = pat[i]
256 i = i+1
254 i = i+1
257 if c == '*':
255 if c == '*':
258 if peek() == '*':
256 if peek() == '*':
259 i += 1
257 i += 1
260 res += '.*'
258 res += '.*'
261 else:
259 else:
262 res += '[^/]*'
260 res += '[^/]*'
263 elif c == '?':
261 elif c == '?':
264 res += '.'
262 res += '.'
265 elif c == '[':
263 elif c == '[':
266 j = i
264 j = i
267 if j < n and pat[j] in '!]':
265 if j < n and pat[j] in '!]':
268 j += 1
266 j += 1
269 while j < n and pat[j] != ']':
267 while j < n and pat[j] != ']':
270 j += 1
268 j += 1
271 if j >= n:
269 if j >= n:
272 res += '\\['
270 res += '\\['
273 else:
271 else:
274 stuff = pat[i:j].replace('\\','\\\\')
272 stuff = pat[i:j].replace('\\','\\\\')
275 i = j + 1
273 i = j + 1
276 if stuff[0] == '!':
274 if stuff[0] == '!':
277 stuff = '^' + stuff[1:]
275 stuff = '^' + stuff[1:]
278 elif stuff[0] == '^':
276 elif stuff[0] == '^':
279 stuff = '\\' + stuff
277 stuff = '\\' + stuff
280 res = '%s[%s]' % (res, stuff)
278 res = '%s[%s]' % (res, stuff)
281 elif c == '{':
279 elif c == '{':
282 group += 1
280 group += 1
283 res += '(?:'
281 res += '(?:'
284 elif c == '}' and group:
282 elif c == '}' and group:
285 res += ')'
283 res += ')'
286 group -= 1
284 group -= 1
287 elif c == ',' and group:
285 elif c == ',' and group:
288 res += '|'
286 res += '|'
289 elif c == '\\':
287 elif c == '\\':
290 p = peek()
288 p = peek()
291 if p:
289 if p:
292 i += 1
290 i += 1
293 res += re.escape(p)
291 res += re.escape(p)
294 else:
292 else:
295 res += re.escape(c)
293 res += re.escape(c)
296 else:
294 else:
297 res += re.escape(c)
295 res += re.escape(c)
298 return head + res + tail
296 return head + res + tail
299
297
300 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
298 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
301
299
302 def pathto(root, n1, n2):
300 def pathto(root, n1, n2):
303 '''return the relative path from one place to another.
301 '''return the relative path from one place to another.
304 root should use os.sep to separate directories
302 root should use os.sep to separate directories
305 n1 should use os.sep to separate directories
303 n1 should use os.sep to separate directories
306 n2 should use "/" to separate directories
304 n2 should use "/" to separate directories
307 returns an os.sep-separated path.
305 returns an os.sep-separated path.
308
306
309 If n1 is a relative path, it's assumed it's
307 If n1 is a relative path, it's assumed it's
310 relative to root.
308 relative to root.
311 n2 should always be relative to root.
309 n2 should always be relative to root.
312 '''
310 '''
313 if not n1: return localpath(n2)
311 if not n1: return localpath(n2)
314 if os.path.isabs(n1):
312 if os.path.isabs(n1):
315 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
313 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
316 return os.path.join(root, localpath(n2))
314 return os.path.join(root, localpath(n2))
317 n2 = '/'.join((pconvert(root), n2))
315 n2 = '/'.join((pconvert(root), n2))
318 a, b = splitpath(n1), n2.split('/')
316 a, b = splitpath(n1), n2.split('/')
319 a.reverse()
317 a.reverse()
320 b.reverse()
318 b.reverse()
321 while a and b and a[-1] == b[-1]:
319 while a and b and a[-1] == b[-1]:
322 a.pop()
320 a.pop()
323 b.pop()
321 b.pop()
324 b.reverse()
322 b.reverse()
325 return os.sep.join((['..'] * len(a)) + b) or '.'
323 return os.sep.join((['..'] * len(a)) + b) or '.'
326
324
327 def canonpath(root, cwd, myname):
325 def canonpath(root, cwd, myname):
328 """return the canonical path of myname, given cwd and root"""
326 """return the canonical path of myname, given cwd and root"""
329 if root == os.sep:
327 if root == os.sep:
330 rootsep = os.sep
328 rootsep = os.sep
331 elif endswithsep(root):
329 elif endswithsep(root):
332 rootsep = root
330 rootsep = root
333 else:
331 else:
334 rootsep = root + os.sep
332 rootsep = root + os.sep
335 name = myname
333 name = myname
336 if not os.path.isabs(name):
334 if not os.path.isabs(name):
337 name = os.path.join(root, cwd, name)
335 name = os.path.join(root, cwd, name)
338 name = os.path.normpath(name)
336 name = os.path.normpath(name)
339 audit_path = path_auditor(root)
337 audit_path = path_auditor(root)
340 if name != rootsep and name.startswith(rootsep):
338 if name != rootsep and name.startswith(rootsep):
341 name = name[len(rootsep):]
339 name = name[len(rootsep):]
342 audit_path(name)
340 audit_path(name)
343 return pconvert(name)
341 return pconvert(name)
344 elif name == root:
342 elif name == root:
345 return ''
343 return ''
346 else:
344 else:
347 # Determine whether `name' is in the hierarchy at or beneath `root',
345 # Determine whether `name' is in the hierarchy at or beneath `root',
348 # by iterating name=dirname(name) until that causes no change (can't
346 # by iterating name=dirname(name) until that causes no change (can't
349 # check name == '/', because that doesn't work on windows). For each
347 # check name == '/', because that doesn't work on windows). For each
350 # `name', compare dev/inode numbers. If they match, the list `rel'
348 # `name', compare dev/inode numbers. If they match, the list `rel'
351 # holds the reversed list of components making up the relative file
349 # holds the reversed list of components making up the relative file
352 # name we want.
350 # name we want.
353 root_st = os.stat(root)
351 root_st = os.stat(root)
354 rel = []
352 rel = []
355 while True:
353 while True:
356 try:
354 try:
357 name_st = os.stat(name)
355 name_st = os.stat(name)
358 except OSError:
356 except OSError:
359 break
357 break
360 if samestat(name_st, root_st):
358 if samestat(name_st, root_st):
361 if not rel:
359 if not rel:
362 # name was actually the same as root (maybe a symlink)
360 # name was actually the same as root (maybe a symlink)
363 return ''
361 return ''
364 rel.reverse()
362 rel.reverse()
365 name = os.path.join(*rel)
363 name = os.path.join(*rel)
366 audit_path(name)
364 audit_path(name)
367 return pconvert(name)
365 return pconvert(name)
368 dirname, basename = os.path.split(name)
366 dirname, basename = os.path.split(name)
369 rel.append(basename)
367 rel.append(basename)
370 if dirname == name:
368 if dirname == name:
371 break
369 break
372 name = dirname
370 name = dirname
373
371
374 raise Abort('%s not under root' % myname)
372 raise Abort('%s not under root' % myname)
375
373
376 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
374 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
377 """build a function to match a set of file patterns
375 """build a function to match a set of file patterns
378
376
379 arguments:
377 arguments:
380 canonroot - the canonical root of the tree you're matching against
378 canonroot - the canonical root of the tree you're matching against
381 cwd - the current working directory, if relevant
379 cwd - the current working directory, if relevant
382 names - patterns to find
380 names - patterns to find
383 inc - patterns to include
381 inc - patterns to include
384 exc - patterns to exclude
382 exc - patterns to exclude
385 dflt_pat - if a pattern in names has no explicit type, assume this one
383 dflt_pat - if a pattern in names has no explicit type, assume this one
386 src - where these patterns came from (e.g. .hgignore)
384 src - where these patterns came from (e.g. .hgignore)
387
385
388 a pattern is one of:
386 a pattern is one of:
389 'glob:<glob>' - a glob relative to cwd
387 'glob:<glob>' - a glob relative to cwd
390 're:<regexp>' - a regular expression
388 're:<regexp>' - a regular expression
391 'path:<path>' - a path relative to canonroot
389 'path:<path>' - a path relative to canonroot
392 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
390 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
393 'relpath:<path>' - a path relative to cwd
391 'relpath:<path>' - a path relative to cwd
394 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
392 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
395 '<something>' - one of the cases above, selected by the dflt_pat argument
393 '<something>' - one of the cases above, selected by the dflt_pat argument
396
394
397 returns:
395 returns:
398 a 3-tuple containing
396 a 3-tuple containing
399 - list of roots (places where one should start a recursive walk of the fs);
397 - list of roots (places where one should start a recursive walk of the fs);
400 this often matches the explicit non-pattern names passed in, but also
398 this often matches the explicit non-pattern names passed in, but also
401 includes the initial part of glob: patterns that has no glob characters
399 includes the initial part of glob: patterns that has no glob characters
402 - a bool match(filename) function
400 - a bool match(filename) function
403 - a bool indicating if any patterns were passed in
401 - a bool indicating if any patterns were passed in
404 """
402 """
405
403
406 # a common case: no patterns at all
404 # a common case: no patterns at all
407 if not names and not inc and not exc:
405 if not names and not inc and not exc:
408 return [], always, False
406 return [], always, False
409
407
410 def contains_glob(name):
408 def contains_glob(name):
411 for c in name:
409 for c in name:
412 if c in _globchars: return True
410 if c in _globchars: return True
413 return False
411 return False
414
412
415 def regex(kind, name, tail):
413 def regex(kind, name, tail):
416 '''convert a pattern into a regular expression'''
414 '''convert a pattern into a regular expression'''
417 if not name:
415 if not name:
418 return ''
416 return ''
419 if kind == 're':
417 if kind == 're':
420 return name
418 return name
421 elif kind == 'path':
419 elif kind == 'path':
422 return '^' + re.escape(name) + '(?:/|$)'
420 return '^' + re.escape(name) + '(?:/|$)'
423 elif kind == 'relglob':
421 elif kind == 'relglob':
424 return globre(name, '(?:|.*/)', tail)
422 return globre(name, '(?:|.*/)', tail)
425 elif kind == 'relpath':
423 elif kind == 'relpath':
426 return re.escape(name) + '(?:/|$)'
424 return re.escape(name) + '(?:/|$)'
427 elif kind == 'relre':
425 elif kind == 'relre':
428 if name.startswith('^'):
426 if name.startswith('^'):
429 return name
427 return name
430 return '.*' + name
428 return '.*' + name
431 return globre(name, '', tail)
429 return globre(name, '', tail)
432
430
433 def matchfn(pats, tail):
431 def matchfn(pats, tail):
434 """build a matching function from a set of patterns"""
432 """build a matching function from a set of patterns"""
435 if not pats:
433 if not pats:
436 return
434 return
437 try:
435 try:
438 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
436 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
439 if len(pat) > 20000:
437 if len(pat) > 20000:
440 raise OverflowError()
438 raise OverflowError()
441 return re.compile(pat).match
439 return re.compile(pat).match
442 except OverflowError:
440 except OverflowError:
443 # We're using a Python with a tiny regex engine and we
441 # We're using a Python with a tiny regex engine and we
444 # made it explode, so we'll divide the pattern list in two
442 # made it explode, so we'll divide the pattern list in two
445 # until it works
443 # until it works
446 l = len(pats)
444 l = len(pats)
447 if l < 2:
445 if l < 2:
448 raise
446 raise
449 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
447 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
450 return lambda s: a(s) or b(s)
448 return lambda s: a(s) or b(s)
451 except re.error:
449 except re.error:
452 for k, p in pats:
450 for k, p in pats:
453 try:
451 try:
454 re.compile('(?:%s)' % regex(k, p, tail))
452 re.compile('(?:%s)' % regex(k, p, tail))
455 except re.error:
453 except re.error:
456 if src:
454 if src:
457 raise Abort("%s: invalid pattern (%s): %s" %
455 raise Abort("%s: invalid pattern (%s): %s" %
458 (src, k, p))
456 (src, k, p))
459 else:
457 else:
460 raise Abort("invalid pattern (%s): %s" % (k, p))
458 raise Abort("invalid pattern (%s): %s" % (k, p))
461 raise Abort("invalid pattern")
459 raise Abort("invalid pattern")
462
460
463 def globprefix(pat):
461 def globprefix(pat):
464 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
462 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
465 root = []
463 root = []
466 for p in pat.split('/'):
464 for p in pat.split('/'):
467 if contains_glob(p): break
465 if contains_glob(p): break
468 root.append(p)
466 root.append(p)
469 return '/'.join(root) or '.'
467 return '/'.join(root) or '.'
470
468
471 def normalizepats(names, default):
469 def normalizepats(names, default):
472 pats = []
470 pats = []
473 roots = []
471 roots = []
474 anypats = False
472 anypats = False
475 for kind, name in [patkind(p, default) for p in names]:
473 for kind, name in [patkind(p, default) for p in names]:
476 if kind in ('glob', 'relpath'):
474 if kind in ('glob', 'relpath'):
477 name = canonpath(canonroot, cwd, name)
475 name = canonpath(canonroot, cwd, name)
478 elif kind in ('relglob', 'path'):
476 elif kind in ('relglob', 'path'):
479 name = normpath(name)
477 name = normpath(name)
480
478
481 pats.append((kind, name))
479 pats.append((kind, name))
482
480
483 if kind in ('glob', 're', 'relglob', 'relre'):
481 if kind in ('glob', 're', 'relglob', 'relre'):
484 anypats = True
482 anypats = True
485
483
486 if kind == 'glob':
484 if kind == 'glob':
487 root = globprefix(name)
485 root = globprefix(name)
488 roots.append(root)
486 roots.append(root)
489 elif kind in ('relpath', 'path'):
487 elif kind in ('relpath', 'path'):
490 roots.append(name or '.')
488 roots.append(name or '.')
491 elif kind == 'relglob':
489 elif kind == 'relglob':
492 roots.append('.')
490 roots.append('.')
493 return roots, pats, anypats
491 return roots, pats, anypats
494
492
495 roots, pats, anypats = normalizepats(names, dflt_pat)
493 roots, pats, anypats = normalizepats(names, dflt_pat)
496
494
497 patmatch = matchfn(pats, '$') or always
495 patmatch = matchfn(pats, '$') or always
498 incmatch = always
496 incmatch = always
499 if inc:
497 if inc:
500 dummy, inckinds, dummy = normalizepats(inc, 'glob')
498 dummy, inckinds, dummy = normalizepats(inc, 'glob')
501 incmatch = matchfn(inckinds, '(?:/|$)')
499 incmatch = matchfn(inckinds, '(?:/|$)')
502 excmatch = never
500 excmatch = never
503 if exc:
501 if exc:
504 dummy, exckinds, dummy = normalizepats(exc, 'glob')
502 dummy, exckinds, dummy = normalizepats(exc, 'glob')
505 excmatch = matchfn(exckinds, '(?:/|$)')
503 excmatch = matchfn(exckinds, '(?:/|$)')
506
504
507 if not names and inc and not exc:
505 if not names and inc and not exc:
508 # common case: hgignore patterns
506 # common case: hgignore patterns
509 match = incmatch
507 match = incmatch
510 else:
508 else:
511 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
509 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
512
510
513 return (roots, match, (inc or exc or anypats) and True)
511 return (roots, match, (inc or exc or anypats) and True)
514
512
515 _hgexecutable = None
513 _hgexecutable = None
516
514
517 def main_is_frozen():
515 def main_is_frozen():
518 """return True if we are a frozen executable.
516 """return True if we are a frozen executable.
519
517
520 The code supports py2exe (most common, Windows only) and tools/freeze
518 The code supports py2exe (most common, Windows only) and tools/freeze
521 (portable, not much used).
519 (portable, not much used).
522 """
520 """
523 return (hasattr(sys, "frozen") or # new py2exe
521 return (hasattr(sys, "frozen") or # new py2exe
524 hasattr(sys, "importers") or # old py2exe
522 hasattr(sys, "importers") or # old py2exe
525 imp.is_frozen("__main__")) # tools/freeze
523 imp.is_frozen("__main__")) # tools/freeze
526
524
527 def hgexecutable():
525 def hgexecutable():
528 """return location of the 'hg' executable.
526 """return location of the 'hg' executable.
529
527
530 Defaults to $HG or 'hg' in the search path.
528 Defaults to $HG or 'hg' in the search path.
531 """
529 """
532 if _hgexecutable is None:
530 if _hgexecutable is None:
533 hg = os.environ.get('HG')
531 hg = os.environ.get('HG')
534 if hg:
532 if hg:
535 set_hgexecutable(hg)
533 set_hgexecutable(hg)
536 elif main_is_frozen():
534 elif main_is_frozen():
537 set_hgexecutable(sys.executable)
535 set_hgexecutable(sys.executable)
538 else:
536 else:
539 set_hgexecutable(find_exe('hg') or 'hg')
537 set_hgexecutable(find_exe('hg') or 'hg')
540 return _hgexecutable
538 return _hgexecutable
541
539
542 def set_hgexecutable(path):
540 def set_hgexecutable(path):
543 """set location of the 'hg' executable"""
541 """set location of the 'hg' executable"""
544 global _hgexecutable
542 global _hgexecutable
545 _hgexecutable = path
543 _hgexecutable = path
546
544
547 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
545 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
548 '''enhanced shell command execution.
546 '''enhanced shell command execution.
549 run with environment maybe modified, maybe in different dir.
547 run with environment maybe modified, maybe in different dir.
550
548
551 if command fails and onerr is None, return status. if ui object,
549 if command fails and onerr is None, return status. if ui object,
552 print error message and return status, else raise onerr object as
550 print error message and return status, else raise onerr object as
553 exception.'''
551 exception.'''
554 def py2shell(val):
552 def py2shell(val):
555 'convert python object into string that is useful to shell'
553 'convert python object into string that is useful to shell'
556 if val in (None, False):
554 if val in (None, False):
557 return '0'
555 return '0'
558 if val == True:
556 if val == True:
559 return '1'
557 return '1'
560 return str(val)
558 return str(val)
561 oldenv = {}
559 oldenv = {}
562 for k in environ:
560 for k in environ:
563 oldenv[k] = os.environ.get(k)
561 oldenv[k] = os.environ.get(k)
564 if cwd is not None:
562 if cwd is not None:
565 oldcwd = os.getcwd()
563 oldcwd = os.getcwd()
566 origcmd = cmd
564 origcmd = cmd
567 if os.name == 'nt':
565 if os.name == 'nt':
568 cmd = '"%s"' % cmd
566 cmd = '"%s"' % cmd
569 try:
567 try:
570 for k, v in environ.iteritems():
568 for k, v in environ.iteritems():
571 os.environ[k] = py2shell(v)
569 os.environ[k] = py2shell(v)
572 os.environ['HG'] = hgexecutable()
570 os.environ['HG'] = hgexecutable()
573 if cwd is not None and oldcwd != cwd:
571 if cwd is not None and oldcwd != cwd:
574 os.chdir(cwd)
572 os.chdir(cwd)
575 rc = os.system(cmd)
573 rc = os.system(cmd)
576 if sys.platform == 'OpenVMS' and rc & 1:
574 if sys.platform == 'OpenVMS' and rc & 1:
577 rc = 0
575 rc = 0
578 if rc and onerr:
576 if rc and onerr:
579 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
577 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
580 explain_exit(rc)[0])
578 explain_exit(rc)[0])
581 if errprefix:
579 if errprefix:
582 errmsg = '%s: %s' % (errprefix, errmsg)
580 errmsg = '%s: %s' % (errprefix, errmsg)
583 try:
581 try:
584 onerr.warn(errmsg + '\n')
582 onerr.warn(errmsg + '\n')
585 except AttributeError:
583 except AttributeError:
586 raise onerr(errmsg)
584 raise onerr(errmsg)
587 return rc
585 return rc
588 finally:
586 finally:
589 for k, v in oldenv.iteritems():
587 for k, v in oldenv.iteritems():
590 if v is None:
588 if v is None:
591 del os.environ[k]
589 del os.environ[k]
592 else:
590 else:
593 os.environ[k] = v
591 os.environ[k] = v
594 if cwd is not None and oldcwd != cwd:
592 if cwd is not None and oldcwd != cwd:
595 os.chdir(oldcwd)
593 os.chdir(oldcwd)
596
594
597 def checksignature(func):
595 def checksignature(func):
598 '''wrap a function with code to check for calling errors'''
596 '''wrap a function with code to check for calling errors'''
599 def check(*args, **kwargs):
597 def check(*args, **kwargs):
600 try:
598 try:
601 return func(*args, **kwargs)
599 return func(*args, **kwargs)
602 except TypeError:
600 except TypeError:
603 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
601 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
604 raise error.SignatureError
602 raise error.SignatureError
605 raise
603 raise
606
604
607 return check
605 return check
608
606
609 # os.path.lexists is not available on python2.3
607 # os.path.lexists is not available on python2.3
610 def lexists(filename):
608 def lexists(filename):
611 "test whether a file with this name exists. does not follow symlinks"
609 "test whether a file with this name exists. does not follow symlinks"
612 try:
610 try:
613 os.lstat(filename)
611 os.lstat(filename)
614 except:
612 except:
615 return False
613 return False
616 return True
614 return True
617
615
618 def rename(src, dst):
616 def rename(src, dst):
619 """forcibly rename a file"""
617 """forcibly rename a file"""
620 try:
618 try:
621 os.rename(src, dst)
619 os.rename(src, dst)
622 except OSError, err: # FIXME: check err (EEXIST ?)
620 except OSError, err: # FIXME: check err (EEXIST ?)
623
621
624 # On windows, rename to existing file is not allowed, so we
622 # On windows, rename to existing file is not allowed, so we
625 # must delete destination first. But if a file is open, unlink
623 # must delete destination first. But if a file is open, unlink
626 # schedules it for delete but does not delete it. Rename
624 # schedules it for delete but does not delete it. Rename
627 # happens immediately even for open files, so we rename
625 # happens immediately even for open files, so we rename
628 # destination to a temporary name, then delete that. Then
626 # destination to a temporary name, then delete that. Then
629 # rename is safe to do.
627 # rename is safe to do.
630 # The temporary name is chosen at random to avoid the situation
628 # The temporary name is chosen at random to avoid the situation
631 # where a file is left lying around from a previous aborted run.
629 # where a file is left lying around from a previous aborted run.
632 # The usual race condition this introduces can't be avoided as
630 # The usual race condition this introduces can't be avoided as
633 # we need the name to rename into, and not the file itself. Due
631 # we need the name to rename into, and not the file itself. Due
634 # to the nature of the operation however, any races will at worst
632 # to the nature of the operation however, any races will at worst
635 # lead to the rename failing and the current operation aborting.
633 # lead to the rename failing and the current operation aborting.
636
634
637 def tempname(prefix):
635 def tempname(prefix):
638 for tries in xrange(10):
636 for tries in xrange(10):
639 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
637 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
640 if not os.path.exists(temp):
638 if not os.path.exists(temp):
641 return temp
639 return temp
642 raise IOError, (errno.EEXIST, "No usable temporary filename found")
640 raise IOError, (errno.EEXIST, "No usable temporary filename found")
643
641
644 temp = tempname(dst)
642 temp = tempname(dst)
645 os.rename(dst, temp)
643 os.rename(dst, temp)
646 os.unlink(temp)
644 os.unlink(temp)
647 os.rename(src, dst)
645 os.rename(src, dst)
648
646
649 def unlink(f):
647 def unlink(f):
650 """unlink and remove the directory if it is empty"""
648 """unlink and remove the directory if it is empty"""
651 os.unlink(f)
649 os.unlink(f)
652 # try removing directories that might now be empty
650 # try removing directories that might now be empty
653 try:
651 try:
654 os.removedirs(os.path.dirname(f))
652 os.removedirs(os.path.dirname(f))
655 except OSError:
653 except OSError:
656 pass
654 pass
657
655
658 def copyfile(src, dest):
656 def copyfile(src, dest):
659 "copy a file, preserving mode and atime/mtime"
657 "copy a file, preserving mode and atime/mtime"
660 if os.path.islink(src):
658 if os.path.islink(src):
661 try:
659 try:
662 os.unlink(dest)
660 os.unlink(dest)
663 except:
661 except:
664 pass
662 pass
665 os.symlink(os.readlink(src), dest)
663 os.symlink(os.readlink(src), dest)
666 else:
664 else:
667 try:
665 try:
668 shutil.copyfile(src, dest)
666 shutil.copyfile(src, dest)
669 shutil.copystat(src, dest)
667 shutil.copystat(src, dest)
670 except shutil.Error, inst:
668 except shutil.Error, inst:
671 raise Abort(str(inst))
669 raise Abort(str(inst))
672
670
673 def copyfiles(src, dst, hardlink=None):
671 def copyfiles(src, dst, hardlink=None):
674 """Copy a directory tree using hardlinks if possible"""
672 """Copy a directory tree using hardlinks if possible"""
675
673
676 if hardlink is None:
674 if hardlink is None:
677 hardlink = (os.stat(src).st_dev ==
675 hardlink = (os.stat(src).st_dev ==
678 os.stat(os.path.dirname(dst)).st_dev)
676 os.stat(os.path.dirname(dst)).st_dev)
679
677
680 if os.path.isdir(src):
678 if os.path.isdir(src):
681 os.mkdir(dst)
679 os.mkdir(dst)
682 for name, kind in osutil.listdir(src):
680 for name, kind in osutil.listdir(src):
683 srcname = os.path.join(src, name)
681 srcname = os.path.join(src, name)
684 dstname = os.path.join(dst, name)
682 dstname = os.path.join(dst, name)
685 copyfiles(srcname, dstname, hardlink)
683 copyfiles(srcname, dstname, hardlink)
686 else:
684 else:
687 if hardlink:
685 if hardlink:
688 try:
686 try:
689 os_link(src, dst)
687 os_link(src, dst)
690 except (IOError, OSError):
688 except (IOError, OSError):
691 hardlink = False
689 hardlink = False
692 shutil.copy(src, dst)
690 shutil.copy(src, dst)
693 else:
691 else:
694 shutil.copy(src, dst)
692 shutil.copy(src, dst)
695
693
696 class path_auditor(object):
694 class path_auditor(object):
697 '''ensure that a filesystem path contains no banned components.
695 '''ensure that a filesystem path contains no banned components.
698 the following properties of a path are checked:
696 the following properties of a path are checked:
699
697
700 - under top-level .hg
698 - under top-level .hg
701 - starts at the root of a windows drive
699 - starts at the root of a windows drive
702 - contains ".."
700 - contains ".."
703 - traverses a symlink (e.g. a/symlink_here/b)
701 - traverses a symlink (e.g. a/symlink_here/b)
704 - inside a nested repository'''
702 - inside a nested repository'''
705
703
706 def __init__(self, root):
704 def __init__(self, root):
707 self.audited = set()
705 self.audited = set()
708 self.auditeddir = set()
706 self.auditeddir = set()
709 self.root = root
707 self.root = root
710
708
711 def __call__(self, path):
709 def __call__(self, path):
712 if path in self.audited:
710 if path in self.audited:
713 return
711 return
714 normpath = os.path.normcase(path)
712 normpath = os.path.normcase(path)
715 parts = splitpath(normpath)
713 parts = splitpath(normpath)
716 if (os.path.splitdrive(path)[0]
714 if (os.path.splitdrive(path)[0]
717 or parts[0].lower() in ('.hg', '.hg.', '')
715 or parts[0].lower() in ('.hg', '.hg.', '')
718 or os.pardir in parts):
716 or os.pardir in parts):
719 raise Abort(_("path contains illegal component: %s") % path)
717 raise Abort(_("path contains illegal component: %s") % path)
720 if '.hg' in path.lower():
718 if '.hg' in path.lower():
721 lparts = [p.lower() for p in parts]
719 lparts = [p.lower() for p in parts]
722 for p in '.hg', '.hg.':
720 for p in '.hg', '.hg.':
723 if p in lparts[1:]:
721 if p in lparts[1:]:
724 pos = lparts.index(p)
722 pos = lparts.index(p)
725 base = os.path.join(*parts[:pos])
723 base = os.path.join(*parts[:pos])
726 raise Abort(_('path %r is inside repo %r') % (path, base))
724 raise Abort(_('path %r is inside repo %r') % (path, base))
727 def check(prefix):
725 def check(prefix):
728 curpath = os.path.join(self.root, prefix)
726 curpath = os.path.join(self.root, prefix)
729 try:
727 try:
730 st = os.lstat(curpath)
728 st = os.lstat(curpath)
731 except OSError, err:
729 except OSError, err:
732 # EINVAL can be raised as invalid path syntax under win32.
730 # EINVAL can be raised as invalid path syntax under win32.
733 # They must be ignored for patterns can be checked too.
731 # They must be ignored for patterns can be checked too.
734 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
732 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
735 raise
733 raise
736 else:
734 else:
737 if stat.S_ISLNK(st.st_mode):
735 if stat.S_ISLNK(st.st_mode):
738 raise Abort(_('path %r traverses symbolic link %r') %
736 raise Abort(_('path %r traverses symbolic link %r') %
739 (path, prefix))
737 (path, prefix))
740 elif (stat.S_ISDIR(st.st_mode) and
738 elif (stat.S_ISDIR(st.st_mode) and
741 os.path.isdir(os.path.join(curpath, '.hg'))):
739 os.path.isdir(os.path.join(curpath, '.hg'))):
742 raise Abort(_('path %r is inside repo %r') %
740 raise Abort(_('path %r is inside repo %r') %
743 (path, prefix))
741 (path, prefix))
744 parts.pop()
742 parts.pop()
745 prefixes = []
743 prefixes = []
746 for n in range(len(parts)):
744 for n in range(len(parts)):
747 prefix = os.sep.join(parts)
745 prefix = os.sep.join(parts)
748 if prefix in self.auditeddir:
746 if prefix in self.auditeddir:
749 break
747 break
750 check(prefix)
748 check(prefix)
751 prefixes.append(prefix)
749 prefixes.append(prefix)
752 parts.pop()
750 parts.pop()
753
751
754 self.audited.add(path)
752 self.audited.add(path)
755 # only add prefixes to the cache after checking everything: we don't
753 # only add prefixes to the cache after checking everything: we don't
756 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
754 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
757 self.auditeddir.update(prefixes)
755 self.auditeddir.update(prefixes)
758
756
759 def nlinks(pathname):
757 def nlinks(pathname):
760 """Return number of hardlinks for the given file."""
758 """Return number of hardlinks for the given file."""
761 return os.lstat(pathname).st_nlink
759 return os.lstat(pathname).st_nlink
762
760
763 if hasattr(os, 'link'):
761 if hasattr(os, 'link'):
764 os_link = os.link
762 os_link = os.link
765 else:
763 else:
766 def os_link(src, dst):
764 def os_link(src, dst):
767 raise OSError(0, _("Hardlinks not supported"))
765 raise OSError(0, _("Hardlinks not supported"))
768
766
769 def lookup_reg(key, name=None, scope=None):
767 def lookup_reg(key, name=None, scope=None):
770 return None
768 return None
771
769
772 if os.name == 'nt':
770 if os.name == 'nt':
773 from windows import *
771 from windows import *
774 def expand_glob(pats):
772 def expand_glob(pats):
775 '''On Windows, expand the implicit globs in a list of patterns'''
773 '''On Windows, expand the implicit globs in a list of patterns'''
776 ret = []
774 ret = []
777 for p in pats:
775 for p in pats:
778 kind, name = patkind(p, None)
776 kind, name = patkind(p, None)
779 if kind is None:
777 if kind is None:
780 globbed = glob.glob(name)
778 globbed = glob.glob(name)
781 if globbed:
779 if globbed:
782 ret.extend(globbed)
780 ret.extend(globbed)
783 continue
781 continue
784 # if we couldn't expand the glob, just keep it around
782 # if we couldn't expand the glob, just keep it around
785 ret.append(p)
783 ret.append(p)
786 return ret
784 return ret
787 else:
785 else:
788 from posix import *
786 from posix import *
789
787
790 def makelock(info, pathname):
788 def makelock(info, pathname):
791 try:
789 try:
792 return os.symlink(info, pathname)
790 return os.symlink(info, pathname)
793 except OSError, why:
791 except OSError, why:
794 if why.errno == errno.EEXIST:
792 if why.errno == errno.EEXIST:
795 raise
793 raise
796 except AttributeError: # no symlink in os
794 except AttributeError: # no symlink in os
797 pass
795 pass
798
796
799 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
797 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
800 os.write(ld, info)
798 os.write(ld, info)
801 os.close(ld)
799 os.close(ld)
802
800
803 def readlock(pathname):
801 def readlock(pathname):
804 try:
802 try:
805 return os.readlink(pathname)
803 return os.readlink(pathname)
806 except OSError, why:
804 except OSError, why:
807 if why.errno not in (errno.EINVAL, errno.ENOSYS):
805 if why.errno not in (errno.EINVAL, errno.ENOSYS):
808 raise
806 raise
809 except AttributeError: # no symlink in os
807 except AttributeError: # no symlink in os
810 pass
808 pass
811 return posixfile(pathname).read()
809 return posixfile(pathname).read()
812
810
813 def fstat(fp):
811 def fstat(fp):
814 '''stat file object that may not have fileno method.'''
812 '''stat file object that may not have fileno method.'''
815 try:
813 try:
816 return os.fstat(fp.fileno())
814 return os.fstat(fp.fileno())
817 except AttributeError:
815 except AttributeError:
818 return os.stat(fp.name)
816 return os.stat(fp.name)
819
817
820 # File system features
818 # File system features
821
819
822 def checkcase(path):
820 def checkcase(path):
823 """
821 """
824 Check whether the given path is on a case-sensitive filesystem
822 Check whether the given path is on a case-sensitive filesystem
825
823
826 Requires a path (like /foo/.hg) ending with a foldable final
824 Requires a path (like /foo/.hg) ending with a foldable final
827 directory component.
825 directory component.
828 """
826 """
829 s1 = os.stat(path)
827 s1 = os.stat(path)
830 d, b = os.path.split(path)
828 d, b = os.path.split(path)
831 p2 = os.path.join(d, b.upper())
829 p2 = os.path.join(d, b.upper())
832 if path == p2:
830 if path == p2:
833 p2 = os.path.join(d, b.lower())
831 p2 = os.path.join(d, b.lower())
834 try:
832 try:
835 s2 = os.stat(p2)
833 s2 = os.stat(p2)
836 if s2 == s1:
834 if s2 == s1:
837 return False
835 return False
838 return True
836 return True
839 except:
837 except:
840 return True
838 return True
841
839
842 _fspathcache = {}
840 _fspathcache = {}
843 def fspath(name, root):
841 def fspath(name, root):
844 '''Get name in the case stored in the filesystem
842 '''Get name in the case stored in the filesystem
845
843
846 The name is either relative to root, or it is an absolute path starting
844 The name is either relative to root, or it is an absolute path starting
847 with root. Note that this function is unnecessary, and should not be
845 with root. Note that this function is unnecessary, and should not be
848 called, for case-sensitive filesystems (simply because it's expensive).
846 called, for case-sensitive filesystems (simply because it's expensive).
849 '''
847 '''
850 # If name is absolute, make it relative
848 # If name is absolute, make it relative
851 if name.lower().startswith(root.lower()):
849 if name.lower().startswith(root.lower()):
852 l = len(root)
850 l = len(root)
853 if name[l] == os.sep or name[l] == os.altsep:
851 if name[l] == os.sep or name[l] == os.altsep:
854 l = l + 1
852 l = l + 1
855 name = name[l:]
853 name = name[l:]
856
854
857 if not os.path.exists(os.path.join(root, name)):
855 if not os.path.exists(os.path.join(root, name)):
858 return None
856 return None
859
857
860 seps = os.sep
858 seps = os.sep
861 if os.altsep:
859 if os.altsep:
862 seps = seps + os.altsep
860 seps = seps + os.altsep
863 # Protect backslashes. This gets silly very quickly.
861 # Protect backslashes. This gets silly very quickly.
864 seps.replace('\\','\\\\')
862 seps.replace('\\','\\\\')
865 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
863 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
866 dir = os.path.normcase(os.path.normpath(root))
864 dir = os.path.normcase(os.path.normpath(root))
867 result = []
865 result = []
868 for part, sep in pattern.findall(name):
866 for part, sep in pattern.findall(name):
869 if sep:
867 if sep:
870 result.append(sep)
868 result.append(sep)
871 continue
869 continue
872
870
873 if dir not in _fspathcache:
871 if dir not in _fspathcache:
874 _fspathcache[dir] = os.listdir(dir)
872 _fspathcache[dir] = os.listdir(dir)
875 contents = _fspathcache[dir]
873 contents = _fspathcache[dir]
876
874
877 lpart = part.lower()
875 lpart = part.lower()
878 for n in contents:
876 for n in contents:
879 if n.lower() == lpart:
877 if n.lower() == lpart:
880 result.append(n)
878 result.append(n)
881 break
879 break
882 else:
880 else:
883 # Cannot happen, as the file exists!
881 # Cannot happen, as the file exists!
884 result.append(part)
882 result.append(part)
885 dir = os.path.join(dir, lpart)
883 dir = os.path.join(dir, lpart)
886
884
887 return ''.join(result)
885 return ''.join(result)
888
886
889 def checkexec(path):
887 def checkexec(path):
890 """
888 """
891 Check whether the given path is on a filesystem with UNIX-like exec flags
889 Check whether the given path is on a filesystem with UNIX-like exec flags
892
890
893 Requires a directory (like /foo/.hg)
891 Requires a directory (like /foo/.hg)
894 """
892 """
895
893
896 # VFAT on some Linux versions can flip mode but it doesn't persist
894 # VFAT on some Linux versions can flip mode but it doesn't persist
897 # a FS remount. Frequently we can detect it if files are created
895 # a FS remount. Frequently we can detect it if files are created
898 # with exec bit on.
896 # with exec bit on.
899
897
900 try:
898 try:
901 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
899 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
902 fh, fn = tempfile.mkstemp("", "", path)
900 fh, fn = tempfile.mkstemp("", "", path)
903 try:
901 try:
904 os.close(fh)
902 os.close(fh)
905 m = os.stat(fn).st_mode & 0777
903 m = os.stat(fn).st_mode & 0777
906 new_file_has_exec = m & EXECFLAGS
904 new_file_has_exec = m & EXECFLAGS
907 os.chmod(fn, m ^ EXECFLAGS)
905 os.chmod(fn, m ^ EXECFLAGS)
908 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
906 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
909 finally:
907 finally:
910 os.unlink(fn)
908 os.unlink(fn)
911 except (IOError, OSError):
909 except (IOError, OSError):
912 # we don't care, the user probably won't be able to commit anyway
910 # we don't care, the user probably won't be able to commit anyway
913 return False
911 return False
914 return not (new_file_has_exec or exec_flags_cannot_flip)
912 return not (new_file_has_exec or exec_flags_cannot_flip)
915
913
916 def checklink(path):
914 def checklink(path):
917 """check whether the given path is on a symlink-capable filesystem"""
915 """check whether the given path is on a symlink-capable filesystem"""
918 # mktemp is not racy because symlink creation will fail if the
916 # mktemp is not racy because symlink creation will fail if the
919 # file already exists
917 # file already exists
920 name = tempfile.mktemp(dir=path)
918 name = tempfile.mktemp(dir=path)
921 try:
919 try:
922 os.symlink(".", name)
920 os.symlink(".", name)
923 os.unlink(name)
921 os.unlink(name)
924 return True
922 return True
925 except (OSError, AttributeError):
923 except (OSError, AttributeError):
926 return False
924 return False
927
925
928 def needbinarypatch():
926 def needbinarypatch():
929 """return True if patches should be applied in binary mode by default."""
927 """return True if patches should be applied in binary mode by default."""
930 return os.name == 'nt'
928 return os.name == 'nt'
931
929
932 def endswithsep(path):
930 def endswithsep(path):
933 '''Check path ends with os.sep or os.altsep.'''
931 '''Check path ends with os.sep or os.altsep.'''
934 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
932 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
935
933
936 def splitpath(path):
934 def splitpath(path):
937 '''Split path by os.sep.
935 '''Split path by os.sep.
938 Note that this function does not use os.altsep because this is
936 Note that this function does not use os.altsep because this is
939 an alternative of simple "xxx.split(os.sep)".
937 an alternative of simple "xxx.split(os.sep)".
940 It is recommended to use os.path.normpath() before using this
938 It is recommended to use os.path.normpath() before using this
941 function if need.'''
939 function if need.'''
942 return path.split(os.sep)
940 return path.split(os.sep)
943
941
944 def gui():
942 def gui():
945 '''Are we running in a GUI?'''
943 '''Are we running in a GUI?'''
946 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
944 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
947
945
948 def mktempcopy(name, emptyok=False, createmode=None):
946 def mktempcopy(name, emptyok=False, createmode=None):
949 """Create a temporary file with the same contents from name
947 """Create a temporary file with the same contents from name
950
948
951 The permission bits are copied from the original file.
949 The permission bits are copied from the original file.
952
950
953 If the temporary file is going to be truncated immediately, you
951 If the temporary file is going to be truncated immediately, you
954 can use emptyok=True as an optimization.
952 can use emptyok=True as an optimization.
955
953
956 Returns the name of the temporary file.
954 Returns the name of the temporary file.
957 """
955 """
958 d, fn = os.path.split(name)
956 d, fn = os.path.split(name)
959 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
957 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
960 os.close(fd)
958 os.close(fd)
961 # Temporary files are created with mode 0600, which is usually not
959 # Temporary files are created with mode 0600, which is usually not
962 # what we want. If the original file already exists, just copy
960 # what we want. If the original file already exists, just copy
963 # its mode. Otherwise, manually obey umask.
961 # its mode. Otherwise, manually obey umask.
964 try:
962 try:
965 st_mode = os.lstat(name).st_mode & 0777
963 st_mode = os.lstat(name).st_mode & 0777
966 except OSError, inst:
964 except OSError, inst:
967 if inst.errno != errno.ENOENT:
965 if inst.errno != errno.ENOENT:
968 raise
966 raise
969 st_mode = createmode
967 st_mode = createmode
970 if st_mode is None:
968 if st_mode is None:
971 st_mode = ~umask
969 st_mode = ~umask
972 st_mode &= 0666
970 st_mode &= 0666
973 os.chmod(temp, st_mode)
971 os.chmod(temp, st_mode)
974 if emptyok:
972 if emptyok:
975 return temp
973 return temp
976 try:
974 try:
977 try:
975 try:
978 ifp = posixfile(name, "rb")
976 ifp = posixfile(name, "rb")
979 except IOError, inst:
977 except IOError, inst:
980 if inst.errno == errno.ENOENT:
978 if inst.errno == errno.ENOENT:
981 return temp
979 return temp
982 if not getattr(inst, 'filename', None):
980 if not getattr(inst, 'filename', None):
983 inst.filename = name
981 inst.filename = name
984 raise
982 raise
985 ofp = posixfile(temp, "wb")
983 ofp = posixfile(temp, "wb")
986 for chunk in filechunkiter(ifp):
984 for chunk in filechunkiter(ifp):
987 ofp.write(chunk)
985 ofp.write(chunk)
988 ifp.close()
986 ifp.close()
989 ofp.close()
987 ofp.close()
990 except:
988 except:
991 try: os.unlink(temp)
989 try: os.unlink(temp)
992 except: pass
990 except: pass
993 raise
991 raise
994 return temp
992 return temp
995
993
996 class atomictempfile(posixfile):
994 class atomictempfile(posixfile):
997 """file-like object that atomically updates a file
995 """file-like object that atomically updates a file
998
996
999 All writes will be redirected to a temporary copy of the original
997 All writes will be redirected to a temporary copy of the original
1000 file. When rename is called, the copy is renamed to the original
998 file. When rename is called, the copy is renamed to the original
1001 name, making the changes visible.
999 name, making the changes visible.
1002 """
1000 """
1003 def __init__(self, name, mode, createmode):
1001 def __init__(self, name, mode, createmode):
1004 self.__name = name
1002 self.__name = name
1005 self.temp = mktempcopy(name, emptyok=('w' in mode),
1003 self.temp = mktempcopy(name, emptyok=('w' in mode),
1006 createmode=createmode)
1004 createmode=createmode)
1007 posixfile.__init__(self, self.temp, mode)
1005 posixfile.__init__(self, self.temp, mode)
1008
1006
1009 def rename(self):
1007 def rename(self):
1010 if not self.closed:
1008 if not self.closed:
1011 posixfile.close(self)
1009 posixfile.close(self)
1012 rename(self.temp, localpath(self.__name))
1010 rename(self.temp, localpath(self.__name))
1013
1011
1014 def __del__(self):
1012 def __del__(self):
1015 if not self.closed:
1013 if not self.closed:
1016 try:
1014 try:
1017 os.unlink(self.temp)
1015 os.unlink(self.temp)
1018 except: pass
1016 except: pass
1019 posixfile.close(self)
1017 posixfile.close(self)
1020
1018
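# Usage sketch (illustrative only; the helper and file name are hypothetical).
# Writes go to a temporary copy and only become visible when rename() is
# called; dropping the object without calling rename() discards the changes:
def _atomictempfile_example():
    name = os.path.join(tempfile.mkdtemp(), 'example.txt')
    f = atomictempfile(name, 'w', createmode=None)
    f.write('new contents\n')
    f.rename()                       # temp copy atomically replaces 'name'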
1021 def makedirs(name, mode=None):
1019 def makedirs(name, mode=None):
1022 """recursive directory creation with parent mode inheritance"""
1020 """recursive directory creation with parent mode inheritance"""
1023 try:
1021 try:
1024 os.mkdir(name)
1022 os.mkdir(name)
1025 if mode is not None:
1023 if mode is not None:
1026 os.chmod(name, mode)
1024 os.chmod(name, mode)
1027 return
1025 return
1028 except OSError, err:
1026 except OSError, err:
1029 if err.errno == errno.EEXIST:
1027 if err.errno == errno.EEXIST:
1030 return
1028 return
1031 if err.errno != errno.ENOENT:
1029 if err.errno != errno.ENOENT:
1032 raise
1030 raise
1033 parent = os.path.abspath(os.path.dirname(name))
1031 parent = os.path.abspath(os.path.dirname(name))
1034 makedirs(parent, mode)
1032 makedirs(parent, mode)
1035 makedirs(name, mode)
1033 makedirs(name, mode)
1036
1034
1037 class opener(object):
1035 class opener(object):
1038 """Open files relative to a base directory
1036 """Open files relative to a base directory
1039
1037
1040 This class is used to hide the details of COW semantics and
1038 This class is used to hide the details of COW semantics and
1041 remote file access from higher level code.
1039 remote file access from higher level code.
1042 """
1040 """
1043 def __init__(self, base, audit=True):
1041 def __init__(self, base, audit=True):
1044 self.base = base
1042 self.base = base
1045 if audit:
1043 if audit:
1046 self.audit_path = path_auditor(base)
1044 self.audit_path = path_auditor(base)
1047 else:
1045 else:
1048 self.audit_path = always
1046 self.audit_path = always
1049 self.createmode = None
1047 self.createmode = None
1050
1048
1051 def __getattr__(self, name):
1049 def __getattr__(self, name):
1052 if name == '_can_symlink':
1050 if name == '_can_symlink':
1053 self._can_symlink = checklink(self.base)
1051 self._can_symlink = checklink(self.base)
1054 return self._can_symlink
1052 return self._can_symlink
1055 raise AttributeError(name)
1053 raise AttributeError(name)
1056
1054
1057 def _fixfilemode(self, name):
1055 def _fixfilemode(self, name):
1058 if self.createmode is None:
1056 if self.createmode is None:
1059 return
1057 return
1060 os.chmod(name, self.createmode & 0666)
1058 os.chmod(name, self.createmode & 0666)
1061
1059
1062 def __call__(self, path, mode="r", text=False, atomictemp=False):
1060 def __call__(self, path, mode="r", text=False, atomictemp=False):
1063 self.audit_path(path)
1061 self.audit_path(path)
1064 f = os.path.join(self.base, path)
1062 f = os.path.join(self.base, path)
1065
1063
1066 if not text and "b" not in mode:
1064 if not text and "b" not in mode:
1067 mode += "b" # for that other OS
1065 mode += "b" # for that other OS
1068
1066
1069 nlink = -1
1067 nlink = -1
1070 if mode not in ("r", "rb"):
1068 if mode not in ("r", "rb"):
1071 try:
1069 try:
1072 nlink = nlinks(f)
1070 nlink = nlinks(f)
1073 except OSError:
1071 except OSError:
1074 nlink = 0
1072 nlink = 0
1075 d = os.path.dirname(f)
1073 d = os.path.dirname(f)
1076 if not os.path.isdir(d):
1074 if not os.path.isdir(d):
1077 makedirs(d, self.createmode)
1075 makedirs(d, self.createmode)
1078 if atomictemp:
1076 if atomictemp:
1079 return atomictempfile(f, mode, self.createmode)
1077 return atomictempfile(f, mode, self.createmode)
1080 if nlink > 1:
1078 if nlink > 1:
1081 rename(mktempcopy(f), f)
1079 rename(mktempcopy(f), f)
1082 fp = posixfile(f, mode)
1080 fp = posixfile(f, mode)
1083 if nlink == 0:
1081 if nlink == 0:
1084 self._fixfilemode(f)
1082 self._fixfilemode(f)
1085 return fp
1083 return fp
1086
1084
1087 def symlink(self, src, dst):
1085 def symlink(self, src, dst):
1088 self.audit_path(dst)
1086 self.audit_path(dst)
1089 linkname = os.path.join(self.base, dst)
1087 linkname = os.path.join(self.base, dst)
1090 try:
1088 try:
1091 os.unlink(linkname)
1089 os.unlink(linkname)
1092 except OSError:
1090 except OSError:
1093 pass
1091 pass
1094
1092
1095 dirname = os.path.dirname(linkname)
1093 dirname = os.path.dirname(linkname)
1096 if not os.path.exists(dirname):
1094 if not os.path.exists(dirname):
1097 makedirs(dirname, self.createmode)
1095 makedirs(dirname, self.createmode)
1098
1096
1099 if self._can_symlink:
1097 if self._can_symlink:
1100 try:
1098 try:
1101 os.symlink(src, linkname)
1099 os.symlink(src, linkname)
1102 except OSError, err:
1100 except OSError, err:
1103 raise OSError(err.errno, _('could not symlink to %r: %s') %
1101 raise OSError(err.errno, _('could not symlink to %r: %s') %
1104 (src, err.strerror), linkname)
1102 (src, err.strerror), linkname)
1105 else:
1103 else:
1106 f = self(dst, "w")
1104 f = self(dst, "w")
1107 f.write(src)
1105 f.write(src)
1108 f.close()
1106 f.close()
1109 self._fixfilemode(dst)
1107 self._fixfilemode(dst)
1110
1108
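# Usage sketch (illustrative only; the helper and paths are hypothetical).
# An opener resolves paths relative to its base, audits them so they cannot
# escape it, and creates missing directories on write:
def _opener_example():
    base = tempfile.mkdtemp()
    op = opener(base)                # paths are audited against 'base'
    f = op('store/data.txt', 'w', atomictemp=True)
    f.write('payload')
    f.rename()                       # missing directories were created for us
    fp = op('store/data.txt')
    try:
        return fp.read()             # -> 'payload'
    finally:
        fp.close()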
1111 class chunkbuffer(object):
1109 class chunkbuffer(object):
1112 """Allow arbitrary sized chunks of data to be efficiently read from an
1110 """Allow arbitrary sized chunks of data to be efficiently read from an
1113 iterator over chunks of arbitrary size."""
1111 iterator over chunks of arbitrary size."""
1114
1112
1115 def __init__(self, in_iter):
1113 def __init__(self, in_iter):
1116 """in_iter is the iterator that's iterating over the input chunks.
1114 """in_iter is the iterator that's iterating over the input chunks.
1117 targetsize is how big a buffer to try to maintain."""
1115 targetsize is how big a buffer to try to maintain."""
1118 self.iter = iter(in_iter)
1116 self.iter = iter(in_iter)
1119 self.buf = ''
1117 self.buf = ''
1120 self.targetsize = 2**16
1118 self.targetsize = 2**16
1121
1119
1122 def read(self, l):
1120 def read(self, l):
1123 """Read L bytes of data from the iterator of chunks of data.
1121 """Read L bytes of data from the iterator of chunks of data.
1124 Returns less than L bytes if the iterator runs dry."""
1122 Returns less than L bytes if the iterator runs dry."""
1125 if l > len(self.buf) and self.iter:
1123 if l > len(self.buf) and self.iter:
1126 # Clamp to a multiple of self.targetsize
1124 # Clamp to a multiple of self.targetsize
1127 targetsize = max(l, self.targetsize)
1125 targetsize = max(l, self.targetsize)
1128 collector = cStringIO.StringIO()
1126 collector = cStringIO.StringIO()
1129 collector.write(self.buf)
1127 collector.write(self.buf)
1130 collected = len(self.buf)
1128 collected = len(self.buf)
1131 for chunk in self.iter:
1129 for chunk in self.iter:
1132 collector.write(chunk)
1130 collector.write(chunk)
1133 collected += len(chunk)
1131 collected += len(chunk)
1134 if collected >= targetsize:
1132 if collected >= targetsize:
1135 break
1133 break
1136 if collected < targetsize:
1134 if collected < targetsize:
1137 self.iter = False
1135 self.iter = False
1138 self.buf = collector.getvalue()
1136 self.buf = collector.getvalue()
1139 if len(self.buf) == l:
1137 if len(self.buf) == l:
1140 s, self.buf = str(self.buf), ''
1138 s, self.buf = str(self.buf), ''
1141 else:
1139 else:
1142 s, self.buf = self.buf[:l], buffer(self.buf, l)
1140 s, self.buf = self.buf[:l], buffer(self.buf, l)
1143 return s
1141 return s
1144
1142
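# Usage sketch (illustrative only; the helper is hypothetical). chunkbuffer
# turns an iterator of arbitrarily sized chunks into something that can be
# read a fixed number of bytes at a time:
def _chunkbuffer_example():
    buf = chunkbuffer(iter(['abc', 'defg', 'hi']))
    assert buf.read(4) == 'abcd'     # spans the first two chunks
    assert buf.read(5) == 'efghi'    # drains what is left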
1145 def filechunkiter(f, size=65536, limit=None):
1143 def filechunkiter(f, size=65536, limit=None):
1146 """Create a generator that produces the data in the file size
1144 """Create a generator that produces the data in the file size
1147 (default 65536) bytes at a time, up to optional limit (default is
1145 (default 65536) bytes at a time, up to optional limit (default is
1148 to read all data). Chunks may be less than size bytes if the
1146 to read all data). Chunks may be less than size bytes if the
1149 chunk is the last chunk in the file, or the file is a socket or
1147 chunk is the last chunk in the file, or the file is a socket or
1150 some other type of file that sometimes reads less data than is
1148 some other type of file that sometimes reads less data than is
1151 requested."""
1149 requested."""
1152 assert size >= 0
1150 assert size >= 0
1153 assert limit is None or limit >= 0
1151 assert limit is None or limit >= 0
1154 while True:
1152 while True:
1155 if limit is None: nbytes = size
1153 if limit is None: nbytes = size
1156 else: nbytes = min(limit, size)
1154 else: nbytes = min(limit, size)
1157 s = nbytes and f.read(nbytes)
1155 s = nbytes and f.read(nbytes)
1158 if not s: break
1156 if not s: break
1159 if limit: limit -= len(s)
1157 if limit: limit -= len(s)
1160 yield s
1158 yield s
1161
1159
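# Usage sketch (illustrative only; the helper is hypothetical). Stream a file
# in 8K chunks, stopping after at most 64K bytes have been read:
def _filechunkiter_example(path):
    fp = posixfile(path, 'rb')
    total = 0
    try:
        for chunk in filechunkiter(fp, size=8192, limit=65536):
            total += len(chunk)
    finally:
        fp.close()
    return total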
1162 def makedate():
1160 def makedate():
1163 lt = time.localtime()
1161 lt = time.localtime()
1164 if lt[8] == 1 and time.daylight:
1162 if lt[8] == 1 and time.daylight:
1165 tz = time.altzone
1163 tz = time.altzone
1166 else:
1164 else:
1167 tz = time.timezone
1165 tz = time.timezone
1168 return time.mktime(lt), tz
1166 return time.mktime(lt), tz
1169
1167
1170 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1168 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1171 """represent a (unixtime, offset) tuple as a localized time.
1169 """represent a (unixtime, offset) tuple as a localized time.
1172 unixtime is seconds since the epoch, and offset is the time zone's
1170 unixtime is seconds since the epoch, and offset is the time zone's
1173 number of seconds away from UTC. "%1" and "%2" in the format are
1171 number of seconds away from UTC. "%1" and "%2" in the format are
1174 replaced by the offset's sign plus hours and by its minutes."""
1172 replaced by the offset's sign plus hours and by its minutes."""
1175 t, tz = date or makedate()
1173 t, tz = date or makedate()
1176 if "%1" in format or "%2" in format:
1174 if "%1" in format or "%2" in format:
1177 sign = (tz > 0) and "-" or "+"
1175 sign = (tz > 0) and "-" or "+"
1178 minutes = abs(tz) / 60
1176 minutes = abs(tz) / 60
1179 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1177 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1180 format = format.replace("%2", "%02d" % (minutes % 60))
1178 format = format.replace("%2", "%02d" % (minutes % 60))
1181 s = time.strftime(format, time.gmtime(float(t) - tz))
1179 s = time.strftime(format, time.gmtime(float(t) - tz))
1182 return s
1180 return s
1183
1181
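# Worked example (illustrative only; the helper is hypothetical). A zero
# offset renders as "+0000":
def _datestr_example():
    # 1238587200 is 2009-04-01 12:00:00 UTC
    return datestr((1238587200, 0), format='%Y-%m-%d %H:%M %1%2')
    # -> '2009-04-01 12:00 +0000'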
1184 def shortdate(date=None):
1182 def shortdate(date=None):
1185 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1183 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1186 return datestr(date, format='%Y-%m-%d')
1184 return datestr(date, format='%Y-%m-%d')
1187
1185
1188 def strdate(string, format, defaults=[]):
1186 def strdate(string, format, defaults=[]):
1189 """parse a localized time string and return a (unixtime, offset) tuple.
1187 """parse a localized time string and return a (unixtime, offset) tuple.
1190 if the string cannot be parsed, ValueError is raised."""
1188 if the string cannot be parsed, ValueError is raised."""
1191 def timezone(string):
1189 def timezone(string):
1192 tz = string.split()[-1]
1190 tz = string.split()[-1]
1193 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1191 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1194 sign = (tz[0] == "+") and 1 or -1
1192 sign = (tz[0] == "+") and 1 or -1
1195 hours = int(tz[1:3])
1193 hours = int(tz[1:3])
1196 minutes = int(tz[3:5])
1194 minutes = int(tz[3:5])
1197 return -sign * (hours * 60 + minutes) * 60
1195 return -sign * (hours * 60 + minutes) * 60
1198 if tz == "GMT" or tz == "UTC":
1196 if tz == "GMT" or tz == "UTC":
1199 return 0
1197 return 0
1200 return None
1198 return None
1201
1199
1202 # NOTE: unixtime = localunixtime + offset
1200 # NOTE: unixtime = localunixtime + offset
1203 offset, date = timezone(string), string
1201 offset, date = timezone(string), string
1204 if offset is not None:
1202 if offset is not None:
1205 date = " ".join(string.split()[:-1])
1203 date = " ".join(string.split()[:-1])
1206
1204
1207 # add missing elements from defaults
1205 # add missing elements from defaults
1208 for part in defaults:
1206 for part in defaults:
1209 found = [True for p in part if ("%"+p) in format]
1207 found = [True for p in part if ("%"+p) in format]
1210 if not found:
1208 if not found:
1211 date += "@" + defaults[part]
1209 date += "@" + defaults[part]
1212 format += "@%" + part[0]
1210 format += "@%" + part[0]
1213
1211
1214 timetuple = time.strptime(date, format)
1212 timetuple = time.strptime(date, format)
1215 localunixtime = int(calendar.timegm(timetuple))
1213 localunixtime = int(calendar.timegm(timetuple))
1216 if offset is None:
1214 if offset is None:
1217 # local timezone
1215 # local timezone
1218 unixtime = int(time.mktime(timetuple))
1216 unixtime = int(time.mktime(timetuple))
1219 offset = unixtime - localunixtime
1217 offset = unixtime - localunixtime
1220 else:
1218 else:
1221 unixtime = localunixtime + offset
1219 unixtime = localunixtime + offset
1222 return unixtime, offset
1220 return unixtime, offset
1223
1221
1224 def parsedate(date, formats=None, defaults=None):
1222 def parsedate(date, formats=None, defaults=None):
1225 """parse a localized date/time string and return a (unixtime, offset) tuple.
1223 """parse a localized date/time string and return a (unixtime, offset) tuple.
1226
1224
1227 The date may be a "unixtime offset" string or in one of the specified
1225 The date may be a "unixtime offset" string or in one of the specified
1228 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1226 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1229 """
1227 """
1230 if not date:
1228 if not date:
1231 return 0, 0
1229 return 0, 0
1232 if isinstance(date, tuple) and len(date) == 2:
1230 if isinstance(date, tuple) and len(date) == 2:
1233 return date
1231 return date
1234 if not formats:
1232 if not formats:
1235 formats = defaultdateformats
1233 formats = defaultdateformats
1236 date = date.strip()
1234 date = date.strip()
1237 try:
1235 try:
1238 when, offset = map(int, date.split(' '))
1236 when, offset = map(int, date.split(' '))
1239 except ValueError:
1237 except ValueError:
1240 # fill out defaults
1238 # fill out defaults
1241 if not defaults:
1239 if not defaults:
1242 defaults = {}
1240 defaults = {}
1243 now = makedate()
1241 now = makedate()
1244 for part in "d mb yY HI M S".split():
1242 for part in "d mb yY HI M S".split():
1245 if part not in defaults:
1243 if part not in defaults:
1246 if part[0] in "HMS":
1244 if part[0] in "HMS":
1247 defaults[part] = "00"
1245 defaults[part] = "00"
1248 else:
1246 else:
1249 defaults[part] = datestr(now, "%" + part[0])
1247 defaults[part] = datestr(now, "%" + part[0])
1250
1248
1251 for format in formats:
1249 for format in formats:
1252 try:
1250 try:
1253 when, offset = strdate(date, format, defaults)
1251 when, offset = strdate(date, format, defaults)
1254 except (ValueError, OverflowError):
1252 except (ValueError, OverflowError):
1255 pass
1253 pass
1256 else:
1254 else:
1257 break
1255 break
1258 else:
1256 else:
1259 raise Abort(_('invalid date: %r ') % date)
1257 raise Abort(_('invalid date: %r ') % date)
1260 # validate explicit (probably user-specified) date and
1258 # validate explicit (probably user-specified) date and
1261 # time zone offset. values must fit in signed 32 bits for
1259 # time zone offset. values must fit in signed 32 bits for
1262 # current 32-bit linux runtimes. timezones go from UTC-12
1260 # current 32-bit linux runtimes. timezones go from UTC-12
1263 # to UTC+14
1261 # to UTC+14
1264 if abs(when) > 0x7fffffff:
1262 if abs(when) > 0x7fffffff:
1265 raise Abort(_('date exceeds 32 bits: %d') % when)
1263 raise Abort(_('date exceeds 32 bits: %d') % when)
1266 if offset < -50400 or offset > 43200:
1264 if offset < -50400 or offset > 43200:
1267 raise Abort(_('impossible time zone offset: %d') % offset)
1265 raise Abort(_('impossible time zone offset: %d') % offset)
1268 return when, offset
1266 return when, offset
1269
1267
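# Usage sketch (illustrative only; the helper is hypothetical). A raw
# "unixtime offset" pair is accepted directly; other strings go through
# strdate() with the configured formats:
def _parsedate_example():
    assert parsedate('1238587200 0') == (1238587200, 0)
    # strings in one of the configured formats also work, e.g.
    # parsedate('2009-04-01 12:00 +0000') -> (1238587200, 0)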
1270 def matchdate(date):
1268 def matchdate(date):
1271 """Return a function that matches a given date match specifier
1269 """Return a function that matches a given date match specifier
1272
1270
1273 Formats include:
1271 Formats include:
1274
1272
1275 '{date}' match a given date to the accuracy provided
1273 '{date}' match a given date to the accuracy provided
1276
1274
1277 '<{date}' on or before a given date
1275 '<{date}' on or before a given date
1278
1276
1279 '>{date}' on or after a given date
1277 '>{date}' on or after a given date
1280
1278
1281 """
1279 """
1282
1280
1283 def lower(date):
1281 def lower(date):
1284 d = dict(mb="1", d="1")
1282 d = dict(mb="1", d="1")
1285 return parsedate(date, extendeddateformats, d)[0]
1283 return parsedate(date, extendeddateformats, d)[0]
1286
1284
1287 def upper(date):
1285 def upper(date):
1288 d = dict(mb="12", HI="23", M="59", S="59")
1286 d = dict(mb="12", HI="23", M="59", S="59")
1289 for days in "31 30 29".split():
1287 for days in "31 30 29".split():
1290 try:
1288 try:
1291 d["d"] = days
1289 d["d"] = days
1292 return parsedate(date, extendeddateformats, d)[0]
1290 return parsedate(date, extendeddateformats, d)[0]
1293 except:
1291 except:
1294 pass
1292 pass
1295 d["d"] = "28"
1293 d["d"] = "28"
1296 return parsedate(date, extendeddateformats, d)[0]
1294 return parsedate(date, extendeddateformats, d)[0]
1297
1295
1298 date = date.strip()
1296 date = date.strip()
1299 if date[0] == "<":
1297 if date[0] == "<":
1300 when = upper(date[1:])
1298 when = upper(date[1:])
1301 return lambda x: x <= when
1299 return lambda x: x <= when
1302 elif date[0] == ">":
1300 elif date[0] == ">":
1303 when = lower(date[1:])
1301 when = lower(date[1:])
1304 return lambda x: x >= when
1302 return lambda x: x >= when
1305 elif date[0] == "-":
1303 elif date[0] == "-":
1306 try:
1304 try:
1307 days = int(date[1:])
1305 days = int(date[1:])
1308 except ValueError:
1306 except ValueError:
1309 raise Abort(_("invalid day spec: %s") % date[1:])
1307 raise Abort(_("invalid day spec: %s") % date[1:])
1310 when = makedate()[0] - days * 3600 * 24
1308 when = makedate()[0] - days * 3600 * 24
1311 return lambda x: x >= when
1309 return lambda x: x >= when
1312 elif " to " in date:
1310 elif " to " in date:
1313 a, b = date.split(" to ")
1311 a, b = date.split(" to ")
1314 start, stop = lower(a), upper(b)
1312 start, stop = lower(a), upper(b)
1315 return lambda x: x >= start and x <= stop
1313 return lambda x: x >= start and x <= stop
1316 else:
1314 else:
1317 start, stop = lower(date), upper(date)
1315 start, stop = lower(date), upper(date)
1318 return lambda x: x >= start and x <= stop
1316 return lambda x: x >= start and x <= stop
1319
1317
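# Usage sketch (illustrative only; the helper is hypothetical). matchdate()
# returns a predicate over timestamps:
def _matchdate_example():
    m = matchdate('>2009-04-01')
    return m(makedate()[0])          # True once the clock is past that date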
1320 def shortuser(user):
1318 def shortuser(user):
1321 """Return a short representation of a user name or email address."""
1319 """Return a short representation of a user name or email address."""
1322 f = user.find('@')
1320 f = user.find('@')
1323 if f >= 0:
1321 if f >= 0:
1324 user = user[:f]
1322 user = user[:f]
1325 f = user.find('<')
1323 f = user.find('<')
1326 if f >= 0:
1324 if f >= 0:
1327 user = user[f+1:]
1325 user = user[f+1:]
1328 f = user.find(' ')
1326 f = user.find(' ')
1329 if f >= 0:
1327 if f >= 0:
1330 user = user[:f]
1328 user = user[:f]
1331 f = user.find('.')
1329 f = user.find('.')
1332 if f >= 0:
1330 if f >= 0:
1333 user = user[:f]
1331 user = user[:f]
1334 return user
1332 return user
1335
1333
1336 def email(author):
1334 def email(author):
1337 '''get email of author.'''
1335 '''get email of author.'''
1338 r = author.find('>')
1336 r = author.find('>')
1339 if r == -1: r = None
1337 if r == -1: r = None
1340 return author[author.find('<')+1:r]
1338 return author[author.find('<')+1:r]
1341
1339
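# Worked examples (illustrative only; the helper is hypothetical):
def _user_examples():
    author = 'John Doe <john.doe@example.com>'
    assert shortuser(author) == 'john'
    assert email(author) == 'john.doe@example.com'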
1342 def ellipsis(text, maxlength=400):
1340 def ellipsis(text, maxlength=400):
1343 """Trim string to at most maxlength (default: 400) characters."""
1341 """Trim string to at most maxlength (default: 400) characters."""
1344 if len(text) <= maxlength:
1342 if len(text) <= maxlength:
1345 return text
1343 return text
1346 else:
1344 else:
1347 return "%s..." % (text[:maxlength-3])
1345 return "%s..." % (text[:maxlength-3])
1348
1346
1349 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1347 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1350 '''yield every hg repository under path, recursively.'''
1348 '''yield every hg repository under path, recursively.'''
1351 def errhandler(err):
1349 def errhandler(err):
1352 if err.filename == path:
1350 if err.filename == path:
1353 raise err
1351 raise err
1354 if followsym and hasattr(os.path, 'samestat'):
1352 if followsym and hasattr(os.path, 'samestat'):
1355 def _add_dir_if_not_there(dirlst, dirname):
1353 def _add_dir_if_not_there(dirlst, dirname):
1356 match = False
1354 match = False
1357 samestat = os.path.samestat
1355 samestat = os.path.samestat
1358 dirstat = os.stat(dirname)
1356 dirstat = os.stat(dirname)
1359 for lstdirstat in dirlst:
1357 for lstdirstat in dirlst:
1360 if samestat(dirstat, lstdirstat):
1358 if samestat(dirstat, lstdirstat):
1361 match = True
1359 match = True
1362 break
1360 break
1363 if not match:
1361 if not match:
1364 dirlst.append(dirstat)
1362 dirlst.append(dirstat)
1365 return not match
1363 return not match
1366 else:
1364 else:
1367 followsym = False
1365 followsym = False
1368
1366
1369 if (seen_dirs is None) and followsym:
1367 if (seen_dirs is None) and followsym:
1370 seen_dirs = []
1368 seen_dirs = []
1371 _add_dir_if_not_there(seen_dirs, path)
1369 _add_dir_if_not_there(seen_dirs, path)
1372 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1370 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1373 if '.hg' in dirs:
1371 if '.hg' in dirs:
1374 yield root # found a repository
1372 yield root # found a repository
1375 qroot = os.path.join(root, '.hg', 'patches')
1373 qroot = os.path.join(root, '.hg', 'patches')
1376 if os.path.isdir(os.path.join(qroot, '.hg')):
1374 if os.path.isdir(os.path.join(qroot, '.hg')):
1377 yield qroot # we have a patch queue repo here
1375 yield qroot # we have a patch queue repo here
1378 if recurse:
1376 if recurse:
1379 # avoid recursing inside the .hg directory
1377 # avoid recursing inside the .hg directory
1380 dirs.remove('.hg')
1378 dirs.remove('.hg')
1381 else:
1379 else:
1382 dirs[:] = [] # don't descend further
1380 dirs[:] = [] # don't descend further
1383 elif followsym:
1381 elif followsym:
1384 newdirs = []
1382 newdirs = []
1385 for d in dirs:
1383 for d in dirs:
1386 fname = os.path.join(root, d)
1384 fname = os.path.join(root, d)
1387 if _add_dir_if_not_there(seen_dirs, fname):
1385 if _add_dir_if_not_there(seen_dirs, fname):
1388 if os.path.islink(fname):
1386 if os.path.islink(fname):
1389 for hgname in walkrepos(fname, True, seen_dirs):
1387 for hgname in walkrepos(fname, True, seen_dirs):
1390 yield hgname
1388 yield hgname
1391 else:
1389 else:
1392 newdirs.append(d)
1390 newdirs.append(d)
1393 dirs[:] = newdirs
1391 dirs[:] = newdirs
1394
1392
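# Usage sketch (illustrative only; the helper is hypothetical). List every
# repository below the current directory, descending into nested ones and
# following symlinks where the platform supports samestat():
def _walkrepos_example():
    return list(walkrepos('.', followsym=True, recurse=True))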
1395 _rcpath = None
1393 _rcpath = None
1396
1394
1397 def os_rcpath():
1395 def os_rcpath():
1398 '''return default os-specific hgrc search path'''
1396 '''return default os-specific hgrc search path'''
1399 path = system_rcpath()
1397 path = system_rcpath()
1400 path.extend(user_rcpath())
1398 path.extend(user_rcpath())
1401 path = [os.path.normpath(f) for f in path]
1399 path = [os.path.normpath(f) for f in path]
1402 return path
1400 return path
1403
1401
1404 def rcpath():
1402 def rcpath():
1405 '''return hgrc search path. if env var HGRCPATH is set, use it.
1403 '''return hgrc search path. if env var HGRCPATH is set, use it.
1406 for each item in path, if directory, use files ending in .rc,
1404 for each item in path, if directory, use files ending in .rc,
1407 else use item.
1405 else use item.
1408 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1406 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1409 if no HGRCPATH, use default os-specific path.'''
1407 if no HGRCPATH, use default os-specific path.'''
1410 global _rcpath
1408 global _rcpath
1411 if _rcpath is None:
1409 if _rcpath is None:
1412 if 'HGRCPATH' in os.environ:
1410 if 'HGRCPATH' in os.environ:
1413 _rcpath = []
1411 _rcpath = []
1414 for p in os.environ['HGRCPATH'].split(os.pathsep):
1412 for p in os.environ['HGRCPATH'].split(os.pathsep):
1415 if not p: continue
1413 if not p: continue
1416 if os.path.isdir(p):
1414 if os.path.isdir(p):
1417 for f, kind in osutil.listdir(p):
1415 for f, kind in osutil.listdir(p):
1418 if f.endswith('.rc'):
1416 if f.endswith('.rc'):
1419 _rcpath.append(os.path.join(p, f))
1417 _rcpath.append(os.path.join(p, f))
1420 else:
1418 else:
1421 _rcpath.append(p)
1419 _rcpath.append(p)
1422 else:
1420 else:
1423 _rcpath = os_rcpath()
1421 _rcpath = os_rcpath()
1424 return _rcpath
1422 return _rcpath
1425
1423
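# Usage sketch (illustrative only; the helper and paths are hypothetical).
# Directory entries in HGRCPATH expand to their *.rc files; plain file
# entries are kept as given:
def _rcpath_example():
    os.environ['HGRCPATH'] = '/etc/mercurial' + os.pathsep + '/tmp/extra.rc'
    global _rcpath
    _rcpath = None                   # drop the cached result
    return rcpath()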
1426 def bytecount(nbytes):
1424 def bytecount(nbytes):
1427 '''return byte count formatted as readable string, with units'''
1425 '''return byte count formatted as readable string, with units'''
1428
1426
1429 units = (
1427 units = (
1430 (100, 1<<30, _('%.0f GB')),
1428 (100, 1<<30, _('%.0f GB')),
1431 (10, 1<<30, _('%.1f GB')),
1429 (10, 1<<30, _('%.1f GB')),
1432 (1, 1<<30, _('%.2f GB')),
1430 (1, 1<<30, _('%.2f GB')),
1433 (100, 1<<20, _('%.0f MB')),
1431 (100, 1<<20, _('%.0f MB')),
1434 (10, 1<<20, _('%.1f MB')),
1432 (10, 1<<20, _('%.1f MB')),
1435 (1, 1<<20, _('%.2f MB')),
1433 (1, 1<<20, _('%.2f MB')),
1436 (100, 1<<10, _('%.0f KB')),
1434 (100, 1<<10, _('%.0f KB')),
1437 (10, 1<<10, _('%.1f KB')),
1435 (10, 1<<10, _('%.1f KB')),
1438 (1, 1<<10, _('%.2f KB')),
1436 (1, 1<<10, _('%.2f KB')),
1439 (1, 1, _('%.0f bytes')),
1437 (1, 1, _('%.0f bytes')),
1440 )
1438 )
1441
1439
1442 for multiplier, divisor, format in units:
1440 for multiplier, divisor, format in units:
1443 if nbytes >= divisor * multiplier:
1441 if nbytes >= divisor * multiplier:
1444 return format % (nbytes / float(divisor))
1442 return format % (nbytes / float(divisor))
1445 return units[-1][2] % nbytes
1443 return units[-1][2] % nbytes
1446
1444
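# Worked examples (illustrative only; the helper is hypothetical, and the
# strings assume no translation is active). The first matching unit wins,
# so precision shrinks as the value grows:
def _bytecount_examples():
    assert bytecount(100) == '100 bytes'
    assert bytecount(4096) == '4.00 KB'
    assert bytecount(15 * 1024 * 1024) == '15.0 MB'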
1447 def drop_scheme(scheme, path):
1445 def drop_scheme(scheme, path):
1448 sc = scheme + ':'
1446 sc = scheme + ':'
1449 if path.startswith(sc):
1447 if path.startswith(sc):
1450 path = path[len(sc):]
1448 path = path[len(sc):]
1451 if path.startswith('//'):
1449 if path.startswith('//'):
1452 path = path[2:]
1450 path = path[2:]
1453 return path
1451 return path
1454
1452
1455 def uirepr(s):
1453 def uirepr(s):
1456 # Avoid double backslash in Windows path repr()
1454 # Avoid double backslash in Windows path repr()
1457 return repr(s).replace('\\\\', '\\')
1455 return repr(s).replace('\\\\', '\\')
1458
1456
1459 def termwidth():
1457 def termwidth():
1460 if 'COLUMNS' in os.environ:
1458 if 'COLUMNS' in os.environ:
1461 try:
1459 try:
1462 return int(os.environ['COLUMNS'])
1460 return int(os.environ['COLUMNS'])
1463 except ValueError:
1461 except ValueError:
1464 pass
1462 pass
1465 try:
1463 try:
1466 import termios, array, fcntl
1464 import termios, array, fcntl
1467 for dev in (sys.stdout, sys.stdin):
1465 for dev in (sys.stdout, sys.stdin):
1468 try:
1466 try:
1469 fd = dev.fileno()
1467 fd = dev.fileno()
1470 if not os.isatty(fd):
1468 if not os.isatty(fd):
1471 continue
1469 continue
1472 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1470 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1473 return array.array('h', arri)[1]
1471 return array.array('h', arri)[1]
1474 except ValueError:
1472 except ValueError:
1475 pass
1473 pass
1476 except ImportError:
1474 except ImportError:
1477 pass
1475 pass
1478 return 80
1476 return 80
1479
1477
1480 def iterlines(iterator):
1478 def iterlines(iterator):
1481 for chunk in iterator:
1479 for chunk in iterator:
1482 for line in chunk.splitlines():
1480 for line in chunk.splitlines():
1483 yield line
1481 yield line