util: stop overwriting sha1, overwrite _fastsha1 instead...
Martin Geisler
r8297:7f27e69d default
@@ -1,1471 +1,1476 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
18 import os, stat, threading, time, calendar, glob, osutil, random
18 import os, stat, threading, time, calendar, glob, osutil, random
19 import imp
19 import imp
20
20
21 # Python compatibility
21 # Python compatibility
22
22
23 def sha1(s):
23 def sha1(s):
24 return _fastsha1(s)
25
26 def _fastsha1(s):
27 # This function will import sha1 from hashlib or sha (whichever is
28 # available) and overwrite itself with it on the first call.
29 # Subsequent calls will go directly to the imported function.
24 try:
30 try:
25 import hashlib
31 from hashlib import sha1 as _sha1
26 _sha1 = hashlib.sha1
27 except ImportError:
32 except ImportError:
28 from sha import sha as _sha1
33 from sha import sha as _sha1
29 global sha1
34 global _fastsha1
30 sha1 = _sha1
35 _fastsha1 = _sha1
31 return _sha1(s)
36 return _sha1(s)
32
37
33 import subprocess
38 import subprocess
34 closefds = os.name == 'posix'
39 closefds = os.name == 'posix'
35 def popen2(cmd, mode='t', bufsize=-1):
40 def popen2(cmd, mode='t', bufsize=-1):
36 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
41 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
37 close_fds=closefds,
42 close_fds=closefds,
38 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
43 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
39 return p.stdin, p.stdout
44 return p.stdin, p.stdout
40 def popen3(cmd, mode='t', bufsize=-1):
45 def popen3(cmd, mode='t', bufsize=-1):
41 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
46 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
42 close_fds=closefds,
47 close_fds=closefds,
43 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
48 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
44 stderr=subprocess.PIPE)
49 stderr=subprocess.PIPE)
45 return p.stdin, p.stdout, p.stderr
50 return p.stdin, p.stdout, p.stderr
46 def Popen3(cmd, capturestderr=False, bufsize=-1):
51 def Popen3(cmd, capturestderr=False, bufsize=-1):
47 stderr = capturestderr and subprocess.PIPE or None
52 stderr = capturestderr and subprocess.PIPE or None
48 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
53 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
49 close_fds=closefds,
54 close_fds=closefds,
50 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
55 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
51 stderr=stderr)
56 stderr=stderr)
52 p.fromchild = p.stdout
57 p.fromchild = p.stdout
53 p.tochild = p.stdin
58 p.tochild = p.stdin
54 p.childerr = p.stderr
59 p.childerr = p.stderr
55 return p
60 return p
56
61
57 def version():
62 def version():
58 """Return version information if available."""
63 """Return version information if available."""
59 try:
64 try:
60 import __version__
65 import __version__
61 return __version__.version
66 return __version__.version
62 except ImportError:
67 except ImportError:
63 return 'unknown'
68 return 'unknown'
64
69
65 # used by parsedate
70 # used by parsedate
66 defaultdateformats = (
71 defaultdateformats = (
67 '%Y-%m-%d %H:%M:%S',
72 '%Y-%m-%d %H:%M:%S',
68 '%Y-%m-%d %I:%M:%S%p',
73 '%Y-%m-%d %I:%M:%S%p',
69 '%Y-%m-%d %H:%M',
74 '%Y-%m-%d %H:%M',
70 '%Y-%m-%d %I:%M%p',
75 '%Y-%m-%d %I:%M%p',
71 '%Y-%m-%d',
76 '%Y-%m-%d',
72 '%m-%d',
77 '%m-%d',
73 '%m/%d',
78 '%m/%d',
74 '%m/%d/%y',
79 '%m/%d/%y',
75 '%m/%d/%Y',
80 '%m/%d/%Y',
76 '%a %b %d %H:%M:%S %Y',
81 '%a %b %d %H:%M:%S %Y',
77 '%a %b %d %I:%M:%S%p %Y',
82 '%a %b %d %I:%M:%S%p %Y',
78 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
83 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
79 '%b %d %H:%M:%S %Y',
84 '%b %d %H:%M:%S %Y',
80 '%b %d %I:%M:%S%p %Y',
85 '%b %d %I:%M:%S%p %Y',
81 '%b %d %H:%M:%S',
86 '%b %d %H:%M:%S',
82 '%b %d %I:%M:%S%p',
87 '%b %d %I:%M:%S%p',
83 '%b %d %H:%M',
88 '%b %d %H:%M',
84 '%b %d %I:%M%p',
89 '%b %d %I:%M%p',
85 '%b %d %Y',
90 '%b %d %Y',
86 '%b %d',
91 '%b %d',
87 '%H:%M:%S',
92 '%H:%M:%S',
88 '%I:%M:%S%p',
93 '%I:%M:%S%p',
89 '%H:%M',
94 '%H:%M',
90 '%I:%M%p',
95 '%I:%M%p',
91 )
96 )
92
97
93 extendeddateformats = defaultdateformats + (
98 extendeddateformats = defaultdateformats + (
94 "%Y",
99 "%Y",
95 "%Y-%m",
100 "%Y-%m",
96 "%b",
101 "%b",
97 "%b %Y",
102 "%b %Y",
98 )
103 )
99
104
100 def cachefunc(func):
105 def cachefunc(func):
101 '''cache the result of function calls'''
106 '''cache the result of function calls'''
102 # XXX doesn't handle keyword args
107 # XXX doesn't handle keyword args
103 cache = {}
108 cache = {}
104 if func.func_code.co_argcount == 1:
109 if func.func_code.co_argcount == 1:
105 # we gain a small amount of time because
110 # we gain a small amount of time because
106 # we don't need to pack/unpack the list
111 # we don't need to pack/unpack the list
107 def f(arg):
112 def f(arg):
108 if arg not in cache:
113 if arg not in cache:
109 cache[arg] = func(arg)
114 cache[arg] = func(arg)
110 return cache[arg]
115 return cache[arg]
111 else:
116 else:
112 def f(*args):
117 def f(*args):
113 if args not in cache:
118 if args not in cache:
114 cache[args] = func(*args)
119 cache[args] = func(*args)
115 return cache[args]
120 return cache[args]
116
121
117 return f
122 return f
118
123
119 class propertycache(object):
124 class propertycache(object):
120 def __init__(self, func):
125 def __init__(self, func):
121 self.func = func
126 self.func = func
122 self.name = func.__name__
127 self.name = func.__name__
123 def __get__(self, obj, type=None):
128 def __get__(self, obj, type=None):
124 result = self.func(obj)
129 result = self.func(obj)
125 setattr(obj, self.name, result)
130 setattr(obj, self.name, result)
126 return result
131 return result
127
132
128 def pipefilter(s, cmd):
133 def pipefilter(s, cmd):
129 '''filter string S through command CMD, returning its output'''
134 '''filter string S through command CMD, returning its output'''
130 (pin, pout) = popen2(cmd, 'b')
135 (pin, pout) = popen2(cmd, 'b')
131 def writer():
136 def writer():
132 try:
137 try:
133 pin.write(s)
138 pin.write(s)
134 pin.close()
139 pin.close()
135 except IOError, inst:
140 except IOError, inst:
136 if inst.errno != errno.EPIPE:
141 if inst.errno != errno.EPIPE:
137 raise
142 raise
138
143
139 # we should use select instead on UNIX, but this will work on most
144 # we should use select instead on UNIX, but this will work on most
140 # systems, including Windows
145 # systems, including Windows
141 w = threading.Thread(target=writer)
146 w = threading.Thread(target=writer)
142 w.start()
147 w.start()
143 f = pout.read()
148 f = pout.read()
144 pout.close()
149 pout.close()
145 w.join()
150 w.join()
146 return f
151 return f
147
152
148 def tempfilter(s, cmd):
153 def tempfilter(s, cmd):
149 '''filter string S through a pair of temporary files with CMD.
154 '''filter string S through a pair of temporary files with CMD.
150 CMD is used as a template to create the real command to be run,
155 CMD is used as a template to create the real command to be run,
151 with the strings INFILE and OUTFILE replaced by the real names of
156 with the strings INFILE and OUTFILE replaced by the real names of
152 the temporary files generated.'''
157 the temporary files generated.'''
153 inname, outname = None, None
158 inname, outname = None, None
154 try:
159 try:
155 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
160 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
156 fp = os.fdopen(infd, 'wb')
161 fp = os.fdopen(infd, 'wb')
157 fp.write(s)
162 fp.write(s)
158 fp.close()
163 fp.close()
159 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
164 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
160 os.close(outfd)
165 os.close(outfd)
161 cmd = cmd.replace('INFILE', inname)
166 cmd = cmd.replace('INFILE', inname)
162 cmd = cmd.replace('OUTFILE', outname)
167 cmd = cmd.replace('OUTFILE', outname)
163 code = os.system(cmd)
168 code = os.system(cmd)
164 if sys.platform == 'OpenVMS' and code & 1:
169 if sys.platform == 'OpenVMS' and code & 1:
165 code = 0
170 code = 0
166 if code: raise Abort(_("command '%s' failed: %s") %
171 if code: raise Abort(_("command '%s' failed: %s") %
167 (cmd, explain_exit(code)))
172 (cmd, explain_exit(code)))
168 return open(outname, 'rb').read()
173 return open(outname, 'rb').read()
169 finally:
174 finally:
170 try:
175 try:
171 if inname: os.unlink(inname)
176 if inname: os.unlink(inname)
172 except: pass
177 except: pass
173 try:
178 try:
174 if outname: os.unlink(outname)
179 if outname: os.unlink(outname)
175 except: pass
180 except: pass
176
181
177 filtertable = {
182 filtertable = {
178 'tempfile:': tempfilter,
183 'tempfile:': tempfilter,
179 'pipe:': pipefilter,
184 'pipe:': pipefilter,
180 }
185 }
181
186
182 def filter(s, cmd):
187 def filter(s, cmd):
183 "filter a string through a command that transforms its input to its output"
188 "filter a string through a command that transforms its input to its output"
184 for name, fn in filtertable.iteritems():
189 for name, fn in filtertable.iteritems():
185 if cmd.startswith(name):
190 if cmd.startswith(name):
186 return fn(s, cmd[len(name):].lstrip())
191 return fn(s, cmd[len(name):].lstrip())
187 return pipefilter(s, cmd)
192 return pipefilter(s, cmd)
188
193
189 def binary(s):
194 def binary(s):
190 """return true if a string is binary data"""
195 """return true if a string is binary data"""
191 return bool(s and '\0' in s)
196 return bool(s and '\0' in s)
192
197
193 def increasingchunks(source, min=1024, max=65536):
198 def increasingchunks(source, min=1024, max=65536):
194 '''return no less than min bytes per chunk while data remains,
199 '''return no less than min bytes per chunk while data remains,
195 doubling min after each chunk until it reaches max'''
200 doubling min after each chunk until it reaches max'''
196 def log2(x):
201 def log2(x):
197 if not x:
202 if not x:
198 return 0
203 return 0
199 i = 0
204 i = 0
200 while x:
205 while x:
201 x >>= 1
206 x >>= 1
202 i += 1
207 i += 1
203 return i - 1
208 return i - 1
204
209
205 buf = []
210 buf = []
206 blen = 0
211 blen = 0
207 for chunk in source:
212 for chunk in source:
208 buf.append(chunk)
213 buf.append(chunk)
209 blen += len(chunk)
214 blen += len(chunk)
210 if blen >= min:
215 if blen >= min:
211 if min < max:
216 if min < max:
212 min = min << 1
217 min = min << 1
213 nmin = 1 << log2(blen)
218 nmin = 1 << log2(blen)
214 if nmin > min:
219 if nmin > min:
215 min = nmin
220 min = nmin
216 if min > max:
221 if min > max:
217 min = max
222 min = max
218 yield ''.join(buf)
223 yield ''.join(buf)
219 blen = 0
224 blen = 0
220 buf = []
225 buf = []
221 if buf:
226 if buf:
222 yield ''.join(buf)
227 yield ''.join(buf)
223
228
224 Abort = error.Abort
229 Abort = error.Abort
225
230
226 def always(fn): return True
231 def always(fn): return True
227 def never(fn): return False
232 def never(fn): return False
228
233
229 def patkind(name, default):
234 def patkind(name, default):
230 """Split a string into an optional pattern kind prefix and the
235 """Split a string into an optional pattern kind prefix and the
231 actual pattern."""
236 actual pattern."""
232 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
237 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
233 if name.startswith(prefix + ':'): return name.split(':', 1)
238 if name.startswith(prefix + ':'): return name.split(':', 1)
234 return default, name
239 return default, name
235
240
236 def globre(pat, head='^', tail='$'):
241 def globre(pat, head='^', tail='$'):
237 "convert a glob pattern into a regexp"
242 "convert a glob pattern into a regexp"
238 i, n = 0, len(pat)
243 i, n = 0, len(pat)
239 res = ''
244 res = ''
240 group = 0
245 group = 0
241 def peek(): return i < n and pat[i]
246 def peek(): return i < n and pat[i]
242 while i < n:
247 while i < n:
243 c = pat[i]
248 c = pat[i]
244 i = i+1
249 i = i+1
245 if c == '*':
250 if c == '*':
246 if peek() == '*':
251 if peek() == '*':
247 i += 1
252 i += 1
248 res += '.*'
253 res += '.*'
249 else:
254 else:
250 res += '[^/]*'
255 res += '[^/]*'
251 elif c == '?':
256 elif c == '?':
252 res += '.'
257 res += '.'
253 elif c == '[':
258 elif c == '[':
254 j = i
259 j = i
255 if j < n and pat[j] in '!]':
260 if j < n and pat[j] in '!]':
256 j += 1
261 j += 1
257 while j < n and pat[j] != ']':
262 while j < n and pat[j] != ']':
258 j += 1
263 j += 1
259 if j >= n:
264 if j >= n:
260 res += '\\['
265 res += '\\['
261 else:
266 else:
262 stuff = pat[i:j].replace('\\','\\\\')
267 stuff = pat[i:j].replace('\\','\\\\')
263 i = j + 1
268 i = j + 1
264 if stuff[0] == '!':
269 if stuff[0] == '!':
265 stuff = '^' + stuff[1:]
270 stuff = '^' + stuff[1:]
266 elif stuff[0] == '^':
271 elif stuff[0] == '^':
267 stuff = '\\' + stuff
272 stuff = '\\' + stuff
268 res = '%s[%s]' % (res, stuff)
273 res = '%s[%s]' % (res, stuff)
269 elif c == '{':
274 elif c == '{':
270 group += 1
275 group += 1
271 res += '(?:'
276 res += '(?:'
272 elif c == '}' and group:
277 elif c == '}' and group:
273 res += ')'
278 res += ')'
274 group -= 1
279 group -= 1
275 elif c == ',' and group:
280 elif c == ',' and group:
276 res += '|'
281 res += '|'
277 elif c == '\\':
282 elif c == '\\':
278 p = peek()
283 p = peek()
279 if p:
284 if p:
280 i += 1
285 i += 1
281 res += re.escape(p)
286 res += re.escape(p)
282 else:
287 else:
283 res += re.escape(c)
288 res += re.escape(c)
284 else:
289 else:
285 res += re.escape(c)
290 res += re.escape(c)
286 return head + res + tail
291 return head + res + tail
287
292
288 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
293 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
289
294
290 def pathto(root, n1, n2):
295 def pathto(root, n1, n2):
291 '''return the relative path from one place to another.
296 '''return the relative path from one place to another.
292 root should use os.sep to separate directories
297 root should use os.sep to separate directories
293 n1 should use os.sep to separate directories
298 n1 should use os.sep to separate directories
294 n2 should use "/" to separate directories
299 n2 should use "/" to separate directories
295 returns an os.sep-separated path.
300 returns an os.sep-separated path.
296
301
297 If n1 is a relative path, it's assumed it's
302 If n1 is a relative path, it's assumed it's
298 relative to root.
303 relative to root.
299 n2 should always be relative to root.
304 n2 should always be relative to root.
300 '''
305 '''
301 if not n1: return localpath(n2)
306 if not n1: return localpath(n2)
302 if os.path.isabs(n1):
307 if os.path.isabs(n1):
303 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
308 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
304 return os.path.join(root, localpath(n2))
309 return os.path.join(root, localpath(n2))
305 n2 = '/'.join((pconvert(root), n2))
310 n2 = '/'.join((pconvert(root), n2))
306 a, b = splitpath(n1), n2.split('/')
311 a, b = splitpath(n1), n2.split('/')
307 a.reverse()
312 a.reverse()
308 b.reverse()
313 b.reverse()
309 while a and b and a[-1] == b[-1]:
314 while a and b and a[-1] == b[-1]:
310 a.pop()
315 a.pop()
311 b.pop()
316 b.pop()
312 b.reverse()
317 b.reverse()
313 return os.sep.join((['..'] * len(a)) + b) or '.'
318 return os.sep.join((['..'] * len(a)) + b) or '.'
314
319
315 def canonpath(root, cwd, myname):
320 def canonpath(root, cwd, myname):
316 """return the canonical path of myname, given cwd and root"""
321 """return the canonical path of myname, given cwd and root"""
317 if root == os.sep:
322 if root == os.sep:
318 rootsep = os.sep
323 rootsep = os.sep
319 elif endswithsep(root):
324 elif endswithsep(root):
320 rootsep = root
325 rootsep = root
321 else:
326 else:
322 rootsep = root + os.sep
327 rootsep = root + os.sep
323 name = myname
328 name = myname
324 if not os.path.isabs(name):
329 if not os.path.isabs(name):
325 name = os.path.join(root, cwd, name)
330 name = os.path.join(root, cwd, name)
326 name = os.path.normpath(name)
331 name = os.path.normpath(name)
327 audit_path = path_auditor(root)
332 audit_path = path_auditor(root)
328 if name != rootsep and name.startswith(rootsep):
333 if name != rootsep and name.startswith(rootsep):
329 name = name[len(rootsep):]
334 name = name[len(rootsep):]
330 audit_path(name)
335 audit_path(name)
331 return pconvert(name)
336 return pconvert(name)
332 elif name == root:
337 elif name == root:
333 return ''
338 return ''
334 else:
339 else:
335 # Determine whether `name' is in the hierarchy at or beneath `root',
340 # Determine whether `name' is in the hierarchy at or beneath `root',
336 # by iterating name=dirname(name) until that causes no change (can't
341 # by iterating name=dirname(name) until that causes no change (can't
337 # check name == '/', because that doesn't work on windows). For each
342 # check name == '/', because that doesn't work on windows). For each
338 # `name', compare dev/inode numbers. If they match, the list `rel'
343 # `name', compare dev/inode numbers. If they match, the list `rel'
339 # holds the reversed list of components making up the relative file
344 # holds the reversed list of components making up the relative file
340 # name we want.
345 # name we want.
341 root_st = os.stat(root)
346 root_st = os.stat(root)
342 rel = []
347 rel = []
343 while True:
348 while True:
344 try:
349 try:
345 name_st = os.stat(name)
350 name_st = os.stat(name)
346 except OSError:
351 except OSError:
347 break
352 break
348 if samestat(name_st, root_st):
353 if samestat(name_st, root_st):
349 if not rel:
354 if not rel:
350 # name was actually the same as root (maybe a symlink)
355 # name was actually the same as root (maybe a symlink)
351 return ''
356 return ''
352 rel.reverse()
357 rel.reverse()
353 name = os.path.join(*rel)
358 name = os.path.join(*rel)
354 audit_path(name)
359 audit_path(name)
355 return pconvert(name)
360 return pconvert(name)
356 dirname, basename = os.path.split(name)
361 dirname, basename = os.path.split(name)
357 rel.append(basename)
362 rel.append(basename)
358 if dirname == name:
363 if dirname == name:
359 break
364 break
360 name = dirname
365 name = dirname
361
366
362 raise Abort('%s not under root' % myname)
367 raise Abort('%s not under root' % myname)
363
368
364 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
369 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
365 """build a function to match a set of file patterns
370 """build a function to match a set of file patterns
366
371
367 arguments:
372 arguments:
368 canonroot - the canonical root of the tree you're matching against
373 canonroot - the canonical root of the tree you're matching against
369 cwd - the current working directory, if relevant
374 cwd - the current working directory, if relevant
370 names - patterns to find
375 names - patterns to find
371 inc - patterns to include
376 inc - patterns to include
372 exc - patterns to exclude
377 exc - patterns to exclude
373 dflt_pat - if a pattern in names has no explicit type, assume this one
378 dflt_pat - if a pattern in names has no explicit type, assume this one
374 src - where these patterns came from (e.g. .hgignore)
379 src - where these patterns came from (e.g. .hgignore)
375
380
376 a pattern is one of:
381 a pattern is one of:
377 'glob:<glob>' - a glob relative to cwd
382 'glob:<glob>' - a glob relative to cwd
378 're:<regexp>' - a regular expression
383 're:<regexp>' - a regular expression
379 'path:<path>' - a path relative to canonroot
384 'path:<path>' - a path relative to canonroot
380 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
385 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
381 'relpath:<path>' - a path relative to cwd
386 'relpath:<path>' - a path relative to cwd
382 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
387 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
383 '<something>' - one of the cases above, selected by the dflt_pat argument
388 '<something>' - one of the cases above, selected by the dflt_pat argument
384
389
385 returns:
390 returns:
386 a 3-tuple containing
391 a 3-tuple containing
387 - list of roots (places where one should start a recursive walk of the fs);
392 - list of roots (places where one should start a recursive walk of the fs);
388 this often matches the explicit non-pattern names passed in, but also
393 this often matches the explicit non-pattern names passed in, but also
389 includes the initial part of glob: patterns that has no glob characters
394 includes the initial part of glob: patterns that has no glob characters
390 - a bool match(filename) function
395 - a bool match(filename) function
391 - a bool indicating if any patterns were passed in
396 - a bool indicating if any patterns were passed in
392 """
397 """
393
398
394 # a common case: no patterns at all
399 # a common case: no patterns at all
395 if not names and not inc and not exc:
400 if not names and not inc and not exc:
396 return [], always, False
401 return [], always, False
397
402
398 def contains_glob(name):
403 def contains_glob(name):
399 for c in name:
404 for c in name:
400 if c in _globchars: return True
405 if c in _globchars: return True
401 return False
406 return False
402
407
403 def regex(kind, name, tail):
408 def regex(kind, name, tail):
404 '''convert a pattern into a regular expression'''
409 '''convert a pattern into a regular expression'''
405 if not name:
410 if not name:
406 return ''
411 return ''
407 if kind == 're':
412 if kind == 're':
408 return name
413 return name
409 elif kind == 'path':
414 elif kind == 'path':
410 return '^' + re.escape(name) + '(?:/|$)'
415 return '^' + re.escape(name) + '(?:/|$)'
411 elif kind == 'relglob':
416 elif kind == 'relglob':
412 return globre(name, '(?:|.*/)', tail)
417 return globre(name, '(?:|.*/)', tail)
413 elif kind == 'relpath':
418 elif kind == 'relpath':
414 return re.escape(name) + '(?:/|$)'
419 return re.escape(name) + '(?:/|$)'
415 elif kind == 'relre':
420 elif kind == 'relre':
416 if name.startswith('^'):
421 if name.startswith('^'):
417 return name
422 return name
418 return '.*' + name
423 return '.*' + name
419 return globre(name, '', tail)
424 return globre(name, '', tail)
420
425
421 def matchfn(pats, tail):
426 def matchfn(pats, tail):
422 """build a matching function from a set of patterns"""
427 """build a matching function from a set of patterns"""
423 if not pats:
428 if not pats:
424 return
429 return
425 try:
430 try:
426 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
431 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
427 if len(pat) > 20000:
432 if len(pat) > 20000:
428 raise OverflowError()
433 raise OverflowError()
429 return re.compile(pat).match
434 return re.compile(pat).match
430 except OverflowError:
435 except OverflowError:
431 # We're using a Python with a tiny regex engine and we
436 # We're using a Python with a tiny regex engine and we
432 # made it explode, so we'll divide the pattern list in two
437 # made it explode, so we'll divide the pattern list in two
433 # until it works
438 # until it works
434 l = len(pats)
439 l = len(pats)
435 if l < 2:
440 if l < 2:
436 raise
441 raise
437 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
442 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
438 return lambda s: a(s) or b(s)
443 return lambda s: a(s) or b(s)
439 except re.error:
444 except re.error:
440 for k, p in pats:
445 for k, p in pats:
441 try:
446 try:
442 re.compile('(?:%s)' % regex(k, p, tail))
447 re.compile('(?:%s)' % regex(k, p, tail))
443 except re.error:
448 except re.error:
444 if src:
449 if src:
445 raise Abort("%s: invalid pattern (%s): %s" %
450 raise Abort("%s: invalid pattern (%s): %s" %
446 (src, k, p))
451 (src, k, p))
447 else:
452 else:
448 raise Abort("invalid pattern (%s): %s" % (k, p))
453 raise Abort("invalid pattern (%s): %s" % (k, p))
449 raise Abort("invalid pattern")
454 raise Abort("invalid pattern")
450
455
451 def globprefix(pat):
456 def globprefix(pat):
452 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
457 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
453 root = []
458 root = []
454 for p in pat.split('/'):
459 for p in pat.split('/'):
455 if contains_glob(p): break
460 if contains_glob(p): break
456 root.append(p)
461 root.append(p)
457 return '/'.join(root) or '.'
462 return '/'.join(root) or '.'
458
463
459 def normalizepats(names, default):
464 def normalizepats(names, default):
460 pats = []
465 pats = []
461 roots = []
466 roots = []
462 anypats = False
467 anypats = False
463 for kind, name in [patkind(p, default) for p in names]:
468 for kind, name in [patkind(p, default) for p in names]:
464 if kind in ('glob', 'relpath'):
469 if kind in ('glob', 'relpath'):
465 name = canonpath(canonroot, cwd, name)
470 name = canonpath(canonroot, cwd, name)
466 elif kind in ('relglob', 'path'):
471 elif kind in ('relglob', 'path'):
467 name = normpath(name)
472 name = normpath(name)
468
473
469 pats.append((kind, name))
474 pats.append((kind, name))
470
475
471 if kind in ('glob', 're', 'relglob', 'relre'):
476 if kind in ('glob', 're', 'relglob', 'relre'):
472 anypats = True
477 anypats = True
473
478
474 if kind == 'glob':
479 if kind == 'glob':
475 root = globprefix(name)
480 root = globprefix(name)
476 roots.append(root)
481 roots.append(root)
477 elif kind in ('relpath', 'path'):
482 elif kind in ('relpath', 'path'):
478 roots.append(name or '.')
483 roots.append(name or '.')
479 elif kind == 'relglob':
484 elif kind == 'relglob':
480 roots.append('.')
485 roots.append('.')
481 return roots, pats, anypats
486 return roots, pats, anypats
482
487
483 roots, pats, anypats = normalizepats(names, dflt_pat)
488 roots, pats, anypats = normalizepats(names, dflt_pat)
484
489
485 patmatch = matchfn(pats, '$') or always
490 patmatch = matchfn(pats, '$') or always
486 incmatch = always
491 incmatch = always
487 if inc:
492 if inc:
488 dummy, inckinds, dummy = normalizepats(inc, 'glob')
493 dummy, inckinds, dummy = normalizepats(inc, 'glob')
489 incmatch = matchfn(inckinds, '(?:/|$)')
494 incmatch = matchfn(inckinds, '(?:/|$)')
490 excmatch = never
495 excmatch = never
491 if exc:
496 if exc:
492 dummy, exckinds, dummy = normalizepats(exc, 'glob')
497 dummy, exckinds, dummy = normalizepats(exc, 'glob')
493 excmatch = matchfn(exckinds, '(?:/|$)')
498 excmatch = matchfn(exckinds, '(?:/|$)')
494
499
495 if not names and inc and not exc:
500 if not names and inc and not exc:
496 # common case: hgignore patterns
501 # common case: hgignore patterns
497 match = incmatch
502 match = incmatch
498 else:
503 else:
499 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
504 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
500
505
501 return (roots, match, (inc or exc or anypats) and True)
506 return (roots, match, (inc or exc or anypats) and True)
502
507
503 _hgexecutable = None
508 _hgexecutable = None
504
509
505 def main_is_frozen():
510 def main_is_frozen():
506 """return True if we are a frozen executable.
511 """return True if we are a frozen executable.
507
512
508 The code supports py2exe (most common, Windows only) and tools/freeze
513 The code supports py2exe (most common, Windows only) and tools/freeze
509 (portable, not much used).
514 (portable, not much used).
510 """
515 """
511 return (hasattr(sys, "frozen") or # new py2exe
516 return (hasattr(sys, "frozen") or # new py2exe
512 hasattr(sys, "importers") or # old py2exe
517 hasattr(sys, "importers") or # old py2exe
513 imp.is_frozen("__main__")) # tools/freeze
518 imp.is_frozen("__main__")) # tools/freeze
514
519
515 def hgexecutable():
520 def hgexecutable():
516 """return location of the 'hg' executable.
521 """return location of the 'hg' executable.
517
522
518 Defaults to $HG or 'hg' in the search path.
523 Defaults to $HG or 'hg' in the search path.
519 """
524 """
520 if _hgexecutable is None:
525 if _hgexecutable is None:
521 hg = os.environ.get('HG')
526 hg = os.environ.get('HG')
522 if hg:
527 if hg:
523 set_hgexecutable(hg)
528 set_hgexecutable(hg)
524 elif main_is_frozen():
529 elif main_is_frozen():
525 set_hgexecutable(sys.executable)
530 set_hgexecutable(sys.executable)
526 else:
531 else:
527 set_hgexecutable(find_exe('hg') or 'hg')
532 set_hgexecutable(find_exe('hg') or 'hg')
528 return _hgexecutable
533 return _hgexecutable
529
534
530 def set_hgexecutable(path):
535 def set_hgexecutable(path):
531 """set location of the 'hg' executable"""
536 """set location of the 'hg' executable"""
532 global _hgexecutable
537 global _hgexecutable
533 _hgexecutable = path
538 _hgexecutable = path
534
539
535 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
540 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
536 '''enhanced shell command execution.
541 '''enhanced shell command execution.
537 run with environment maybe modified, maybe in different dir.
542 run with environment maybe modified, maybe in different dir.
538
543
539 if command fails and onerr is None, return status. if ui object,
544 if command fails and onerr is None, return status. if ui object,
540 print error message and return status, else raise onerr object as
545 print error message and return status, else raise onerr object as
541 exception.'''
546 exception.'''
542 def py2shell(val):
547 def py2shell(val):
543 'convert python object into string that is useful to shell'
548 'convert python object into string that is useful to shell'
544 if val in (None, False):
549 if val in (None, False):
545 return '0'
550 return '0'
546 if val == True:
551 if val == True:
547 return '1'
552 return '1'
548 return str(val)
553 return str(val)
549 oldenv = {}
554 oldenv = {}
550 for k in environ:
555 for k in environ:
551 oldenv[k] = os.environ.get(k)
556 oldenv[k] = os.environ.get(k)
552 if cwd is not None:
557 if cwd is not None:
553 oldcwd = os.getcwd()
558 oldcwd = os.getcwd()
554 origcmd = cmd
559 origcmd = cmd
555 if os.name == 'nt':
560 if os.name == 'nt':
556 cmd = '"%s"' % cmd
561 cmd = '"%s"' % cmd
557 try:
562 try:
558 for k, v in environ.iteritems():
563 for k, v in environ.iteritems():
559 os.environ[k] = py2shell(v)
564 os.environ[k] = py2shell(v)
560 os.environ['HG'] = hgexecutable()
565 os.environ['HG'] = hgexecutable()
561 if cwd is not None and oldcwd != cwd:
566 if cwd is not None and oldcwd != cwd:
562 os.chdir(cwd)
567 os.chdir(cwd)
563 rc = os.system(cmd)
568 rc = os.system(cmd)
564 if sys.platform == 'OpenVMS' and rc & 1:
569 if sys.platform == 'OpenVMS' and rc & 1:
565 rc = 0
570 rc = 0
566 if rc and onerr:
571 if rc and onerr:
567 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
572 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
568 explain_exit(rc)[0])
573 explain_exit(rc)[0])
569 if errprefix:
574 if errprefix:
570 errmsg = '%s: %s' % (errprefix, errmsg)
575 errmsg = '%s: %s' % (errprefix, errmsg)
571 try:
576 try:
572 onerr.warn(errmsg + '\n')
577 onerr.warn(errmsg + '\n')
573 except AttributeError:
578 except AttributeError:
574 raise onerr(errmsg)
579 raise onerr(errmsg)
575 return rc
580 return rc
576 finally:
581 finally:
577 for k, v in oldenv.iteritems():
582 for k, v in oldenv.iteritems():
578 if v is None:
583 if v is None:
579 del os.environ[k]
584 del os.environ[k]
580 else:
585 else:
581 os.environ[k] = v
586 os.environ[k] = v
582 if cwd is not None and oldcwd != cwd:
587 if cwd is not None and oldcwd != cwd:
583 os.chdir(oldcwd)
588 os.chdir(oldcwd)
584
589
585 def checksignature(func):
590 def checksignature(func):
586 '''wrap a function with code to check for calling errors'''
591 '''wrap a function with code to check for calling errors'''
587 def check(*args, **kwargs):
592 def check(*args, **kwargs):
588 try:
593 try:
589 return func(*args, **kwargs)
594 return func(*args, **kwargs)
590 except TypeError:
595 except TypeError:
591 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
596 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
592 raise error.SignatureError
597 raise error.SignatureError
593 raise
598 raise
594
599
595 return check
600 return check
596
601
597 # os.path.lexists is not available on python2.3
602 # os.path.lexists is not available on python2.3
598 def lexists(filename):
603 def lexists(filename):
599 "test whether a file with this name exists. does not follow symlinks"
604 "test whether a file with this name exists. does not follow symlinks"
600 try:
605 try:
601 os.lstat(filename)
606 os.lstat(filename)
602 except:
607 except:
603 return False
608 return False
604 return True
609 return True
605
610
606 def rename(src, dst):
611 def rename(src, dst):
607 """forcibly rename a file"""
612 """forcibly rename a file"""
608 try:
613 try:
609 os.rename(src, dst)
614 os.rename(src, dst)
610 except OSError, err: # FIXME: check err (EEXIST ?)
615 except OSError, err: # FIXME: check err (EEXIST ?)
611
616
612 # On windows, rename to existing file is not allowed, so we
617 # On windows, rename to existing file is not allowed, so we
613 # must delete destination first. But if a file is open, unlink
618 # must delete destination first. But if a file is open, unlink
614 # schedules it for delete but does not delete it. Rename
619 # schedules it for delete but does not delete it. Rename
615 # happens immediately even for open files, so we rename
620 # happens immediately even for open files, so we rename
616 # destination to a temporary name, then delete that. Then
621 # destination to a temporary name, then delete that. Then
617 # rename is safe to do.
622 # rename is safe to do.
618 # The temporary name is chosen at random to avoid the situation
623 # The temporary name is chosen at random to avoid the situation
619 # where a file is left lying around from a previous aborted run.
624 # where a file is left lying around from a previous aborted run.
620 # The usual race condition this introduces can't be avoided as
625 # The usual race condition this introduces can't be avoided as
621 # we need the name to rename into, and not the file itself. Due
626 # we need the name to rename into, and not the file itself. Due
622 # to the nature of the operation however, any races will at worst
627 # to the nature of the operation however, any races will at worst
623 # lead to the rename failing and the current operation aborting.
628 # lead to the rename failing and the current operation aborting.
624
629
625 def tempname(prefix):
630 def tempname(prefix):
626 for tries in xrange(10):
631 for tries in xrange(10):
627 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
632 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
628 if not os.path.exists(temp):
633 if not os.path.exists(temp):
629 return temp
634 return temp
630 raise IOError, (errno.EEXIST, "No usable temporary filename found")
635 raise IOError, (errno.EEXIST, "No usable temporary filename found")
631
636
632 temp = tempname(dst)
637 temp = tempname(dst)
633 os.rename(dst, temp)
638 os.rename(dst, temp)
634 os.unlink(temp)
639 os.unlink(temp)
635 os.rename(src, dst)
640 os.rename(src, dst)
636
641
637 def unlink(f):
642 def unlink(f):
638 """unlink and remove the directory if it is empty"""
643 """unlink and remove the directory if it is empty"""
639 os.unlink(f)
644 os.unlink(f)
640 # try removing directories that might now be empty
645 # try removing directories that might now be empty
641 try:
646 try:
642 os.removedirs(os.path.dirname(f))
647 os.removedirs(os.path.dirname(f))
643 except OSError:
648 except OSError:
644 pass
649 pass
645
650
646 def copyfile(src, dest):
651 def copyfile(src, dest):
647 "copy a file, preserving mode and atime/mtime"
652 "copy a file, preserving mode and atime/mtime"
648 if os.path.islink(src):
653 if os.path.islink(src):
649 try:
654 try:
650 os.unlink(dest)
655 os.unlink(dest)
651 except:
656 except:
652 pass
657 pass
653 os.symlink(os.readlink(src), dest)
658 os.symlink(os.readlink(src), dest)
654 else:
659 else:
655 try:
660 try:
656 shutil.copyfile(src, dest)
661 shutil.copyfile(src, dest)
657 shutil.copystat(src, dest)
662 shutil.copystat(src, dest)
658 except shutil.Error, inst:
663 except shutil.Error, inst:
659 raise Abort(str(inst))
664 raise Abort(str(inst))
660
665
661 def copyfiles(src, dst, hardlink=None):
666 def copyfiles(src, dst, hardlink=None):
662 """Copy a directory tree using hardlinks if possible"""
667 """Copy a directory tree using hardlinks if possible"""
663
668
664 if hardlink is None:
669 if hardlink is None:
665 hardlink = (os.stat(src).st_dev ==
670 hardlink = (os.stat(src).st_dev ==
666 os.stat(os.path.dirname(dst)).st_dev)
671 os.stat(os.path.dirname(dst)).st_dev)
667
672
668 if os.path.isdir(src):
673 if os.path.isdir(src):
669 os.mkdir(dst)
674 os.mkdir(dst)
670 for name, kind in osutil.listdir(src):
675 for name, kind in osutil.listdir(src):
671 srcname = os.path.join(src, name)
676 srcname = os.path.join(src, name)
672 dstname = os.path.join(dst, name)
677 dstname = os.path.join(dst, name)
673 copyfiles(srcname, dstname, hardlink)
678 copyfiles(srcname, dstname, hardlink)
674 else:
679 else:
675 if hardlink:
680 if hardlink:
676 try:
681 try:
677 os_link(src, dst)
682 os_link(src, dst)
678 except (IOError, OSError):
683 except (IOError, OSError):
679 hardlink = False
684 hardlink = False
680 shutil.copy(src, dst)
685 shutil.copy(src, dst)
681 else:
686 else:
682 shutil.copy(src, dst)
687 shutil.copy(src, dst)
683
688
684 class path_auditor(object):
689 class path_auditor(object):
685 '''ensure that a filesystem path contains no banned components.
690 '''ensure that a filesystem path contains no banned components.
686 the following properties of a path are checked:
691 the following properties of a path are checked:
687
692
688 - under top-level .hg
693 - under top-level .hg
689 - starts at the root of a windows drive
694 - starts at the root of a windows drive
690 - contains ".."
695 - contains ".."
691 - traverses a symlink (e.g. a/symlink_here/b)
696 - traverses a symlink (e.g. a/symlink_here/b)
692 - inside a nested repository'''
697 - inside a nested repository'''
693
698
694 def __init__(self, root):
699 def __init__(self, root):
695 self.audited = set()
700 self.audited = set()
696 self.auditeddir = set()
701 self.auditeddir = set()
697 self.root = root
702 self.root = root
698
703
699 def __call__(self, path):
704 def __call__(self, path):
700 if path in self.audited:
705 if path in self.audited:
701 return
706 return
702 normpath = os.path.normcase(path)
707 normpath = os.path.normcase(path)
703 parts = splitpath(normpath)
708 parts = splitpath(normpath)
704 if (os.path.splitdrive(path)[0]
709 if (os.path.splitdrive(path)[0]
705 or parts[0].lower() in ('.hg', '.hg.', '')
710 or parts[0].lower() in ('.hg', '.hg.', '')
706 or os.pardir in parts):
711 or os.pardir in parts):
707 raise Abort(_("path contains illegal component: %s") % path)
712 raise Abort(_("path contains illegal component: %s") % path)
708 if '.hg' in path.lower():
713 if '.hg' in path.lower():
709 lparts = [p.lower() for p in parts]
714 lparts = [p.lower() for p in parts]
710 for p in '.hg', '.hg.':
715 for p in '.hg', '.hg.':
711 if p in lparts[1:]:
716 if p in lparts[1:]:
712 pos = lparts.index(p)
717 pos = lparts.index(p)
713 base = os.path.join(*parts[:pos])
718 base = os.path.join(*parts[:pos])
714 raise Abort(_('path %r is inside repo %r') % (path, base))
719 raise Abort(_('path %r is inside repo %r') % (path, base))
715 def check(prefix):
720 def check(prefix):
716 curpath = os.path.join(self.root, prefix)
721 curpath = os.path.join(self.root, prefix)
717 try:
722 try:
718 st = os.lstat(curpath)
723 st = os.lstat(curpath)
719 except OSError, err:
724 except OSError, err:
720 # EINVAL can be raised as invalid path syntax under win32.
725 # EINVAL can be raised as invalid path syntax under win32.
721 # They must be ignored so that patterns can be checked too.
726 # They must be ignored so that patterns can be checked too.
722 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
727 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
723 raise
728 raise
724 else:
729 else:
725 if stat.S_ISLNK(st.st_mode):
730 if stat.S_ISLNK(st.st_mode):
726 raise Abort(_('path %r traverses symbolic link %r') %
731 raise Abort(_('path %r traverses symbolic link %r') %
727 (path, prefix))
732 (path, prefix))
728 elif (stat.S_ISDIR(st.st_mode) and
733 elif (stat.S_ISDIR(st.st_mode) and
729 os.path.isdir(os.path.join(curpath, '.hg'))):
734 os.path.isdir(os.path.join(curpath, '.hg'))):
730 raise Abort(_('path %r is inside repo %r') %
735 raise Abort(_('path %r is inside repo %r') %
731 (path, prefix))
736 (path, prefix))
732 parts.pop()
737 parts.pop()
733 prefixes = []
738 prefixes = []
734 for n in range(len(parts)):
739 for n in range(len(parts)):
735 prefix = os.sep.join(parts)
740 prefix = os.sep.join(parts)
736 if prefix in self.auditeddir:
741 if prefix in self.auditeddir:
737 break
742 break
738 check(prefix)
743 check(prefix)
739 prefixes.append(prefix)
744 prefixes.append(prefix)
740 parts.pop()
745 parts.pop()
741
746
742 self.audited.add(path)
747 self.audited.add(path)
743 # only add prefixes to the cache after checking everything: we don't
748 # only add prefixes to the cache after checking everything: we don't
744 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
749 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
745 self.auditeddir.update(prefixes)
750 self.auditeddir.update(prefixes)
746
751
747 def nlinks(pathname):
752 def nlinks(pathname):
748 """Return number of hardlinks for the given file."""
753 """Return number of hardlinks for the given file."""
749 return os.lstat(pathname).st_nlink
754 return os.lstat(pathname).st_nlink
750
755
751 if hasattr(os, 'link'):
756 if hasattr(os, 'link'):
752 os_link = os.link
757 os_link = os.link
753 else:
758 else:
754 def os_link(src, dst):
759 def os_link(src, dst):
755 raise OSError(0, _("Hardlinks not supported"))
760 raise OSError(0, _("Hardlinks not supported"))
756
761
757 def lookup_reg(key, name=None, scope=None):
762 def lookup_reg(key, name=None, scope=None):
758 return None
763 return None
759
764
760 if os.name == 'nt':
765 if os.name == 'nt':
761 from windows import *
766 from windows import *
762 def expand_glob(pats):
767 def expand_glob(pats):
763 '''On Windows, expand the implicit globs in a list of patterns'''
768 '''On Windows, expand the implicit globs in a list of patterns'''
764 ret = []
769 ret = []
765 for p in pats:
770 for p in pats:
766 kind, name = patkind(p, None)
771 kind, name = patkind(p, None)
767 if kind is None:
772 if kind is None:
768 globbed = glob.glob(name)
773 globbed = glob.glob(name)
769 if globbed:
774 if globbed:
770 ret.extend(globbed)
775 ret.extend(globbed)
771 continue
776 continue
772 # if we couldn't expand the glob, just keep it around
777 # if we couldn't expand the glob, just keep it around
773 ret.append(p)
778 ret.append(p)
774 return ret
779 return ret
775 else:
780 else:
776 from posix import *
781 from posix import *
777
782
778 def makelock(info, pathname):
783 def makelock(info, pathname):
779 try:
784 try:
780 return os.symlink(info, pathname)
785 return os.symlink(info, pathname)
781 except OSError, why:
786 except OSError, why:
782 if why.errno == errno.EEXIST:
787 if why.errno == errno.EEXIST:
783 raise
788 raise
784 except AttributeError: # no symlink in os
789 except AttributeError: # no symlink in os
785 pass
790 pass
786
791
787 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
792 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
788 os.write(ld, info)
793 os.write(ld, info)
789 os.close(ld)
794 os.close(ld)
790
795
791 def readlock(pathname):
796 def readlock(pathname):
792 try:
797 try:
793 return os.readlink(pathname)
798 return os.readlink(pathname)
794 except OSError, why:
799 except OSError, why:
795 if why.errno not in (errno.EINVAL, errno.ENOSYS):
800 if why.errno not in (errno.EINVAL, errno.ENOSYS):
796 raise
801 raise
797 except AttributeError: # no symlink in os
802 except AttributeError: # no symlink in os
798 pass
803 pass
799 return posixfile(pathname).read()
804 return posixfile(pathname).read()
800
805
801 def fstat(fp):
806 def fstat(fp):
802 '''stat file object that may not have fileno method.'''
807 '''stat file object that may not have fileno method.'''
803 try:
808 try:
804 return os.fstat(fp.fileno())
809 return os.fstat(fp.fileno())
805 except AttributeError:
810 except AttributeError:
806 return os.stat(fp.name)
811 return os.stat(fp.name)
807
812
808 # File system features
813 # File system features
809
814
810 def checkcase(path):
815 def checkcase(path):
811 """
816 """
812 Check whether the given path is on a case-sensitive filesystem
817 Check whether the given path is on a case-sensitive filesystem
813
818
814 Requires a path (like /foo/.hg) ending with a foldable final
819 Requires a path (like /foo/.hg) ending with a foldable final
815 directory component.
820 directory component.
816 """
821 """
817 s1 = os.stat(path)
822 s1 = os.stat(path)
818 d, b = os.path.split(path)
823 d, b = os.path.split(path)
819 p2 = os.path.join(d, b.upper())
824 p2 = os.path.join(d, b.upper())
820 if path == p2:
825 if path == p2:
821 p2 = os.path.join(d, b.lower())
826 p2 = os.path.join(d, b.lower())
822 try:
827 try:
823 s2 = os.stat(p2)
828 s2 = os.stat(p2)
824 if s2 == s1:
829 if s2 == s1:
825 return False
830 return False
826 return True
831 return True
827 except:
832 except:
828 return True
833 return True
829
834
830 _fspathcache = {}
835 _fspathcache = {}
831 def fspath(name, root):
836 def fspath(name, root):
832 '''Get name in the case stored in the filesystem
837 '''Get name in the case stored in the filesystem
833
838
834 The name is either relative to root, or it is an absolute path starting
839 The name is either relative to root, or it is an absolute path starting
835 with root. Note that this function is unnecessary, and should not be
840 with root. Note that this function is unnecessary, and should not be
836 called, for case-sensitive filesystems (simply because it's expensive).
841 called, for case-sensitive filesystems (simply because it's expensive).
837 '''
842 '''
838 # If name is absolute, make it relative
843 # If name is absolute, make it relative
839 if name.lower().startswith(root.lower()):
844 if name.lower().startswith(root.lower()):
840 l = len(root)
845 l = len(root)
841 if name[l] == os.sep or name[l] == os.altsep:
846 if name[l] == os.sep or name[l] == os.altsep:
842 l = l + 1
847 l = l + 1
843 name = name[l:]
848 name = name[l:]
844
849
845 if not os.path.exists(os.path.join(root, name)):
850 if not os.path.exists(os.path.join(root, name)):
846 return None
851 return None
847
852
848 seps = os.sep
853 seps = os.sep
849 if os.altsep:
854 if os.altsep:
850 seps = seps + os.altsep
855 seps = seps + os.altsep
851 # Protect backslashes. This gets silly very quickly.
856 # Protect backslashes. This gets silly very quickly.
852 seps.replace('\\','\\\\')
857 seps.replace('\\','\\\\')
853 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
858 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
854 dir = os.path.normcase(os.path.normpath(root))
859 dir = os.path.normcase(os.path.normpath(root))
855 result = []
860 result = []
856 for part, sep in pattern.findall(name):
861 for part, sep in pattern.findall(name):
857 if sep:
862 if sep:
858 result.append(sep)
863 result.append(sep)
859 continue
864 continue
860
865
861 if dir not in _fspathcache:
866 if dir not in _fspathcache:
862 _fspathcache[dir] = os.listdir(dir)
867 _fspathcache[dir] = os.listdir(dir)
863 contents = _fspathcache[dir]
868 contents = _fspathcache[dir]
864
869
865 lpart = part.lower()
870 lpart = part.lower()
866 for n in contents:
871 for n in contents:
867 if n.lower() == lpart:
872 if n.lower() == lpart:
868 result.append(n)
873 result.append(n)
869 break
874 break
870 else:
875 else:
871 # Cannot happen, as the file exists!
876 # Cannot happen, as the file exists!
872 result.append(part)
877 result.append(part)
873 dir = os.path.join(dir, lpart)
878 dir = os.path.join(dir, lpart)
874
879
875 return ''.join(result)
880 return ''.join(result)
876
881
877 def checkexec(path):
882 def checkexec(path):
878 """
883 """
879 Check whether the given path is on a filesystem with UNIX-like exec flags
884 Check whether the given path is on a filesystem with UNIX-like exec flags
880
885
881 Requires a directory (like /foo/.hg)
886 Requires a directory (like /foo/.hg)
882 """
887 """
883
888
884 # VFAT on some Linux versions can flip mode but it doesn't persist
889 # VFAT on some Linux versions can flip mode but it doesn't persist
885 # a FS remount. Frequently we can detect it if files are created
890 # a FS remount. Frequently we can detect it if files are created
886 # with exec bit on.
891 # with exec bit on.
887
892
888 try:
893 try:
889 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
894 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
890 fh, fn = tempfile.mkstemp("", "", path)
895 fh, fn = tempfile.mkstemp("", "", path)
891 try:
896 try:
892 os.close(fh)
897 os.close(fh)
893 m = os.stat(fn).st_mode & 0777
898 m = os.stat(fn).st_mode & 0777
894 new_file_has_exec = m & EXECFLAGS
899 new_file_has_exec = m & EXECFLAGS
895 os.chmod(fn, m ^ EXECFLAGS)
900 os.chmod(fn, m ^ EXECFLAGS)
896 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
901 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
897 finally:
902 finally:
898 os.unlink(fn)
903 os.unlink(fn)
899 except (IOError, OSError):
904 except (IOError, OSError):
900 # we don't care, the user probably won't be able to commit anyway
905 # we don't care, the user probably won't be able to commit anyway
901 return False
906 return False
902 return not (new_file_has_exec or exec_flags_cannot_flip)
907 return not (new_file_has_exec or exec_flags_cannot_flip)
903
908
904 def checklink(path):
909 def checklink(path):
905 """check whether the given path is on a symlink-capable filesystem"""
910 """check whether the given path is on a symlink-capable filesystem"""
906 # mktemp is not racy because symlink creation will fail if the
911 # mktemp is not racy because symlink creation will fail if the
907 # file already exists
912 # file already exists
908 name = tempfile.mktemp(dir=path)
913 name = tempfile.mktemp(dir=path)
909 try:
914 try:
910 os.symlink(".", name)
915 os.symlink(".", name)
911 os.unlink(name)
916 os.unlink(name)
912 return True
917 return True
913 except (OSError, AttributeError):
918 except (OSError, AttributeError):
914 return False
919 return False
915
920
916 def needbinarypatch():
921 def needbinarypatch():
917 """return True if patches should be applied in binary mode by default."""
922 """return True if patches should be applied in binary mode by default."""
918 return os.name == 'nt'
923 return os.name == 'nt'
919
924
920 def endswithsep(path):
925 def endswithsep(path):
921 '''Check path ends with os.sep or os.altsep.'''
926 '''Check path ends with os.sep or os.altsep.'''
922 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
927 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
923
928
924 def splitpath(path):
929 def splitpath(path):
925 '''Split path by os.sep.
930 '''Split path by os.sep.
926 Note that this function does not use os.altsep because it is
931 Note that this function does not use os.altsep because it is
927 intended as an alternative to a simple "xxx.split(os.sep)".
932 intended as an alternative to a simple "xxx.split(os.sep)".
928 It is recommended to use os.path.normpath() before using this
933 It is recommended to use os.path.normpath() before using this
929 function if needed.'''
934 function if needed.'''
930 return path.split(os.sep)
935 return path.split(os.sep)
931
936
932 def gui():
937 def gui():
933 '''Are we running in a GUI?'''
938 '''Are we running in a GUI?'''
934 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
939 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
935
940
936 def mktempcopy(name, emptyok=False, createmode=None):
941 def mktempcopy(name, emptyok=False, createmode=None):
937 """Create a temporary file with the same contents from name
942 """Create a temporary file with the same contents from name
938
943
939 The permission bits are copied from the original file.
944 The permission bits are copied from the original file.
940
945
941 If the temporary file is going to be truncated immediately, you
946 If the temporary file is going to be truncated immediately, you
942 can use emptyok=True as an optimization.
947 can use emptyok=True as an optimization.
943
948
944 Returns the name of the temporary file.
949 Returns the name of the temporary file.
945 """
950 """
946 d, fn = os.path.split(name)
951 d, fn = os.path.split(name)
947 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
952 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
948 os.close(fd)
953 os.close(fd)
949 # Temporary files are created with mode 0600, which is usually not
954 # Temporary files are created with mode 0600, which is usually not
950 # what we want. If the original file already exists, just copy
955 # what we want. If the original file already exists, just copy
951 # its mode. Otherwise, manually obey umask.
956 # its mode. Otherwise, manually obey umask.
952 try:
957 try:
953 st_mode = os.lstat(name).st_mode & 0777
958 st_mode = os.lstat(name).st_mode & 0777
954 except OSError, inst:
959 except OSError, inst:
955 if inst.errno != errno.ENOENT:
960 if inst.errno != errno.ENOENT:
956 raise
961 raise
957 st_mode = createmode
962 st_mode = createmode
958 if st_mode is None:
963 if st_mode is None:
959 st_mode = ~umask
964 st_mode = ~umask
960 st_mode &= 0666
965 st_mode &= 0666
961 os.chmod(temp, st_mode)
966 os.chmod(temp, st_mode)
962 if emptyok:
967 if emptyok:
963 return temp
968 return temp
964 try:
969 try:
965 try:
970 try:
966 ifp = posixfile(name, "rb")
971 ifp = posixfile(name, "rb")
967 except IOError, inst:
972 except IOError, inst:
968 if inst.errno == errno.ENOENT:
973 if inst.errno == errno.ENOENT:
969 return temp
974 return temp
970 if not getattr(inst, 'filename', None):
975 if not getattr(inst, 'filename', None):
971 inst.filename = name
976 inst.filename = name
972 raise
977 raise
973 ofp = posixfile(temp, "wb")
978 ofp = posixfile(temp, "wb")
974 for chunk in filechunkiter(ifp):
979 for chunk in filechunkiter(ifp):
975 ofp.write(chunk)
980 ofp.write(chunk)
976 ifp.close()
981 ifp.close()
977 ofp.close()
982 ofp.close()
978 except:
983 except:
979 try: os.unlink(temp)
984 try: os.unlink(temp)
980 except: pass
985 except: pass
981 raise
986 raise
982 return temp
987 return temp
983
988
class atomictempfile(posixfile):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        posixfile.__init__(self, self.temp, mode)

    def rename(self):
        if not self.closed:
            posixfile.close(self)
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            posixfile.close(self)

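# Usage sketch (illustrative; 'somefile' and newcontents are hypothetical).
# The caller either commits the change with rename() or lets __del__ discard
# the temporary copy if an error occurs before that:
#
#   f = atomictempfile('somefile', 'w', createmode=None)
#   f.write(newcontents)
#   f.rename()                           # changes become visible here
#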
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    def __getattr__(self, name):
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
            d = os.path.dirname(f)
            if not os.path.isdir(d):
                makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

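# Usage sketch (illustrative; '/path/to/repo/.hg' and 'store/data.i' are
# hypothetical arguments). An opener rooted at a repository directory audits
# paths and creates missing parent directories on write:
#
#   op = opener('/path/to/repo/.hg')
#   fp = op('store/data.i', 'ab')        # append, creating dirs as needed
#   fp.write(somebytes)                  # somebytes is a placeholder
#   fp.close()
#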
class chunkbuffer(object):
    """Allow arbitrarily sized chunks of data to be efficiently read
    from an iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        The buffer is refilled in batches of at least self.targetsize
        bytes."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

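# Usage sketch (illustrative). chunkbuffer turns an iterator of unevenly
# sized strings into a file-like read(n) interface:
#
#   >>> cb = chunkbuffer(['abc', 'defgh', 'ij'])
#   >>> cb.read(4)
#   'abcd'
#   >>> cb.read(100)      # shorter result once the iterator runs dry
#   'efghij'
#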
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file, size
    bytes (default 65536) at a time, up to an optional limit (default
    is to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

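# Usage sketch (illustrative; 'somefile' is a hypothetical path). Copying a
# file in bounded chunks instead of reading it all at once:
#
#   src = posixfile('somefile', 'rb')
#   dst = posixfile('somefile.copy', 'wb')
#   for chunk in filechunkiter(src, size=8192):
#       dst.write(chunk)
#   src.close(); dst.close()
#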
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. "%1" in the format expands to the
    timezone's sign and hours, "%2" to its minutes, so the default
    format ends in e.g. "+0200"."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

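# Example (doctest-style sketch; day and month names assume the C locale):
#
#   >>> datestr((0, 0))
#   'Thu Jan 01 00:00:00 1970 +0000'
#   >>> datestr((0, -3600), format='%Y-%m-%d %H:%M %1%2')
#   '1970-01-01 01:00 +0100'
#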
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into an ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

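# Example (doctest-style sketch; the plain "unixtime offset" form avoids any
# dependence on the local timezone or date formats):
#
#   >>> parsedate('1165432709 0')
#   (1165432709, 0)
#   >>> parsedate((1165432709, 0))       # tuples pass through unchanged
#   (1165432709, 0)
#   >>> parsedate('')
#   (0, 0)
#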
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

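# Usage sketch (illustrative). The returned predicate takes a unix timestamp;
# '-30', for instance, matches anything from the last 30 days:
#
#   m = matchdate('-30')
#   m(makedate()[0])                     # now -> True
#   m(makedate()[0] - 60 * 24 * 3600)    # 60 days ago -> False
#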
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

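# Example (doctest-style sketch; the address is made up):
#
#   >>> shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> email('John Doe <john.doe@example.com>')
#   'john.doe@example.com'
#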
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

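# Usage sketch (illustrative; '/srv/hg' is a hypothetical directory of
# repositories). followsym=True also walks through symlinked directories
# while guarding against cycles:
#
#   for repo in walkrepos('/srv/hg', followsym=True, recurse=True):
#       print repo
#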
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

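# Behaviour sketch (illustrative; the paths are made up, and the result is
# cached in _rcpath after the first call):
#
#   HGRCPATH unset               -> os_rcpath(): system-wide files plus the
#                                   user's own hgrc
#   HGRCPATH=/etc/hg:/tmp/x.rc   -> every '*.rc' file in /etc/hg, then /tmp/x.rc
#   HGRCPATH= (empty string)     -> [] (only the repository's .hg/hgrc applies)
#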
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

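# Example (doctest-style sketch, assuming no translation is active so _()
# returns the format strings unchanged):
#
#   >>> bytecount(500)
#   '500 bytes'
#   >>> bytecount(1024)
#   '1.00 KB'
#   >>> bytecount(1000000)
#   '977 KB'
#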
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line