atomictempfile: delegate to posixfile instead of inheriting from it
Bryan O'Sullivan
changeset r8327:aa25be1c (branch: default)
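The diff below replaces inheritance from posixfile with composition: atomictempfile now stores the real file object in self._fp and forwards any attribute it does not define itself through __getattr__. For orientation, a minimal self-contained sketch of that pattern, using the builtin open() in place of posixfile (the class and names here are illustrative, not Mercurial API):

    class delegatingfile(object):
        '''Wrap a real file object and forward everything else to it.'''
        def __init__(self, name, mode='rb'):
            self._fp = open(name, mode)    # the wrapped file object
        def __getattr__(self, name):
            # invoked only for attributes not found on the wrapper itself,
            # so read/write/close/closed/... all come from the wrapped file
            return getattr(self._fp, name)

The wrapper can then override just the methods it cares about, which is exactly what rename() and __del__ do in the new atomictempfile.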
@@ -1,1454 +1,1457 @@
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, threading, time, calendar, glob, random
19 import os, stat, threading, time, calendar, glob, random
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
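A brief usage note on the self-replacing import above: only the very first call pays for the try/except import, after which both module-level names point straight at the chosen implementation (the input strings are arbitrary):

    digest = sha1('some bytes').hexdigest()   # first call: binds hashlib's or sha's sha1
    digest = sha1('more bytes').hexdigest()   # later calls: go directly to _sha1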
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd, mode='t', bufsize=-1):
41 def popen2(cmd, mode='t', bufsize=-1):
42 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
42 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
43 close_fds=closefds,
43 close_fds=closefds,
44 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
44 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
45 return p.stdin, p.stdout
45 return p.stdin, p.stdout
46 def popen3(cmd, mode='t', bufsize=-1):
46 def popen3(cmd, mode='t', bufsize=-1):
47 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
47 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
48 close_fds=closefds,
48 close_fds=closefds,
49 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
49 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
50 stderr=subprocess.PIPE)
50 stderr=subprocess.PIPE)
51 return p.stdin, p.stdout, p.stderr
51 return p.stdin, p.stdout, p.stderr
52
52
53 def version():
53 def version():
54 """Return version information if available."""
54 """Return version information if available."""
55 try:
55 try:
56 import __version__
56 import __version__
57 return __version__.version
57 return __version__.version
58 except ImportError:
58 except ImportError:
59 return 'unknown'
59 return 'unknown'
60
60
61 # used by parsedate
61 # used by parsedate
62 defaultdateformats = (
62 defaultdateformats = (
63 '%Y-%m-%d %H:%M:%S',
63 '%Y-%m-%d %H:%M:%S',
64 '%Y-%m-%d %I:%M:%S%p',
64 '%Y-%m-%d %I:%M:%S%p',
65 '%Y-%m-%d %H:%M',
65 '%Y-%m-%d %H:%M',
66 '%Y-%m-%d %I:%M%p',
66 '%Y-%m-%d %I:%M%p',
67 '%Y-%m-%d',
67 '%Y-%m-%d',
68 '%m-%d',
68 '%m-%d',
69 '%m/%d',
69 '%m/%d',
70 '%m/%d/%y',
70 '%m/%d/%y',
71 '%m/%d/%Y',
71 '%m/%d/%Y',
72 '%a %b %d %H:%M:%S %Y',
72 '%a %b %d %H:%M:%S %Y',
73 '%a %b %d %I:%M:%S%p %Y',
73 '%a %b %d %I:%M:%S%p %Y',
74 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
74 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
75 '%b %d %H:%M:%S %Y',
75 '%b %d %H:%M:%S %Y',
76 '%b %d %I:%M:%S%p %Y',
76 '%b %d %I:%M:%S%p %Y',
77 '%b %d %H:%M:%S',
77 '%b %d %H:%M:%S',
78 '%b %d %I:%M:%S%p',
78 '%b %d %I:%M:%S%p',
79 '%b %d %H:%M',
79 '%b %d %H:%M',
80 '%b %d %I:%M%p',
80 '%b %d %I:%M%p',
81 '%b %d %Y',
81 '%b %d %Y',
82 '%b %d',
82 '%b %d',
83 '%H:%M:%S',
83 '%H:%M:%S',
84 '%I:%M:%SP',
84 '%I:%M:%SP',
85 '%H:%M',
85 '%H:%M',
86 '%I:%M%p',
86 '%I:%M%p',
87 )
87 )
88
88
89 extendeddateformats = defaultdateformats + (
89 extendeddateformats = defaultdateformats + (
90 "%Y",
90 "%Y",
91 "%Y-%m",
91 "%Y-%m",
92 "%b",
92 "%b",
93 "%b %Y",
93 "%b %Y",
94 )
94 )
95
95
96 def cachefunc(func):
96 def cachefunc(func):
97 '''cache the result of function calls'''
97 '''cache the result of function calls'''
98 # XXX doesn't handle keywords args
98 # XXX doesn't handle keywords args
99 cache = {}
99 cache = {}
100 if func.func_code.co_argcount == 1:
100 if func.func_code.co_argcount == 1:
101 # we gain a small amount of time because
101 # we gain a small amount of time because
102 # we don't need to pack/unpack the list
102 # we don't need to pack/unpack the list
103 def f(arg):
103 def f(arg):
104 if arg not in cache:
104 if arg not in cache:
105 cache[arg] = func(arg)
105 cache[arg] = func(arg)
106 return cache[arg]
106 return cache[arg]
107 else:
107 else:
108 def f(*args):
108 def f(*args):
109 if args not in cache:
109 if args not in cache:
110 cache[args] = func(*args)
110 cache[args] = func(*args)
111 return cache[args]
111 return cache[args]
112
112
113 return f
113 return f
114
114
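A small usage sketch for cachefunc; slow_square is made up, and because it takes a single positional argument the faster one-argument wrapper is used:

    def slow_square(x):
        return x * x              # stand-in for something expensive

    square = cachefunc(slow_square)
    square(4)                     # computed and stored in the cache dict
    square(4)                     # answered from the cache, no second call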
115 class propertycache(object):
115 class propertycache(object):
116 def __init__(self, func):
116 def __init__(self, func):
117 self.func = func
117 self.func = func
118 self.name = func.__name__
118 self.name = func.__name__
119 def __get__(self, obj, type=None):
119 def __get__(self, obj, type=None):
120 result = self.func(obj)
120 result = self.func(obj)
121 setattr(obj, self.name, result)
121 setattr(obj, self.name, result)
122 return result
122 return result
123
123
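propertycache is a non-data descriptor: the first access runs the wrapped function, and the setattr() call then shadows the descriptor with the computed value on the instance. A hypothetical usage sketch:

    class stats(object):
        @propertycache
        def total(self):
            # runs once per instance; afterwards self.total is a plain
            # attribute holding this result
            return sum(range(1000000))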
124 def pipefilter(s, cmd):
124 def pipefilter(s, cmd):
125 '''filter string S through command CMD, returning its output'''
125 '''filter string S through command CMD, returning its output'''
126 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
126 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
127 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
127 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
128 pout, perr = p.communicate(s)
128 pout, perr = p.communicate(s)
129 return pout
129 return pout
130
130
131 def tempfilter(s, cmd):
131 def tempfilter(s, cmd):
132 '''filter string S through a pair of temporary files with CMD.
132 '''filter string S through a pair of temporary files with CMD.
133 CMD is used as a template to create the real command to be run,
133 CMD is used as a template to create the real command to be run,
134 with the strings INFILE and OUTFILE replaced by the real names of
134 with the strings INFILE and OUTFILE replaced by the real names of
135 the temporary files generated.'''
135 the temporary files generated.'''
136 inname, outname = None, None
136 inname, outname = None, None
137 try:
137 try:
138 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
138 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
139 fp = os.fdopen(infd, 'wb')
139 fp = os.fdopen(infd, 'wb')
140 fp.write(s)
140 fp.write(s)
141 fp.close()
141 fp.close()
142 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
142 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
143 os.close(outfd)
143 os.close(outfd)
144 cmd = cmd.replace('INFILE', inname)
144 cmd = cmd.replace('INFILE', inname)
145 cmd = cmd.replace('OUTFILE', outname)
145 cmd = cmd.replace('OUTFILE', outname)
146 code = os.system(cmd)
146 code = os.system(cmd)
147 if sys.platform == 'OpenVMS' and code & 1:
147 if sys.platform == 'OpenVMS' and code & 1:
148 code = 0
148 code = 0
149 if code: raise Abort(_("command '%s' failed: %s") %
149 if code: raise Abort(_("command '%s' failed: %s") %
150 (cmd, explain_exit(code)))
150 (cmd, explain_exit(code)))
151 return open(outname, 'rb').read()
151 return open(outname, 'rb').read()
152 finally:
152 finally:
153 try:
153 try:
154 if inname: os.unlink(inname)
154 if inname: os.unlink(inname)
155 except: pass
155 except: pass
156 try:
156 try:
157 if outname: os.unlink(outname)
157 if outname: os.unlink(outname)
158 except: pass
158 except: pass
159
159
160 filtertable = {
160 filtertable = {
161 'tempfile:': tempfilter,
161 'tempfile:': tempfilter,
162 'pipe:': pipefilter,
162 'pipe:': pipefilter,
163 }
163 }
164
164
165 def filter(s, cmd):
165 def filter(s, cmd):
166 "filter a string through a command that transforms its input to its output"
166 "filter a string through a command that transforms its input to its output"
167 for name, fn in filtertable.iteritems():
167 for name, fn in filtertable.iteritems():
168 if cmd.startswith(name):
168 if cmd.startswith(name):
169 return fn(s, cmd[len(name):].lstrip())
169 return fn(s, cmd[len(name):].lstrip())
170 return pipefilter(s, cmd)
170 return pipefilter(s, cmd)
171
171
172 def binary(s):
172 def binary(s):
173 """return true if a string is binary data"""
173 """return true if a string is binary data"""
174 return bool(s and '\0' in s)
174 return bool(s and '\0' in s)
175
175
176 def increasingchunks(source, min=1024, max=65536):
176 def increasingchunks(source, min=1024, max=65536):
177 '''return no less than min bytes per chunk while data remains,
177 '''return no less than min bytes per chunk while data remains,
178 doubling min after each chunk until it reaches max'''
178 doubling min after each chunk until it reaches max'''
179 def log2(x):
179 def log2(x):
180 if not x:
180 if not x:
181 return 0
181 return 0
182 i = 0
182 i = 0
183 while x:
183 while x:
184 x >>= 1
184 x >>= 1
185 i += 1
185 i += 1
186 return i - 1
186 return i - 1
187
187
188 buf = []
188 buf = []
189 blen = 0
189 blen = 0
190 for chunk in source:
190 for chunk in source:
191 buf.append(chunk)
191 buf.append(chunk)
192 blen += len(chunk)
192 blen += len(chunk)
193 if blen >= min:
193 if blen >= min:
194 if min < max:
194 if min < max:
195 min = min << 1
195 min = min << 1
196 nmin = 1 << log2(blen)
196 nmin = 1 << log2(blen)
197 if nmin > min:
197 if nmin > min:
198 min = nmin
198 min = nmin
199 if min > max:
199 if min > max:
200 min = max
200 min = max
201 yield ''.join(buf)
201 yield ''.join(buf)
202 blen = 0
202 blen = 0
203 buf = []
203 buf = []
204 if buf:
204 if buf:
205 yield ''.join(buf)
205 yield ''.join(buf)
206
206
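For example, feeding increasingchunks many small strings yields progressively larger blocks; every yielded chunk except possibly the last is at least the current minimum, which doubles toward max as data keeps arriving (the generator below is just test input):

    pieces = ('x' * 100 for _ in range(300))         # 30000 bytes in 100-byte bits
    sizes = [len(c) for c in increasingchunks(pieces)]
    # sizes climbs from roughly 1k toward the 64k cap; only the final
    # leftover chunk may fall below the current minimum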
207 Abort = error.Abort
207 Abort = error.Abort
208
208
209 def always(fn): return True
209 def always(fn): return True
210 def never(fn): return False
210 def never(fn): return False
211
211
212 def patkind(name, default):
212 def patkind(name, default):
213 """Split a string into an optional pattern kind prefix and the
213 """Split a string into an optional pattern kind prefix and the
214 actual pattern."""
214 actual pattern."""
215 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
215 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
216 if name.startswith(prefix + ':'): return name.split(':', 1)
216 if name.startswith(prefix + ':'): return name.split(':', 1)
217 return default, name
217 return default, name
218
218
219 def globre(pat, head='^', tail='$'):
219 def globre(pat, head='^', tail='$'):
220 "convert a glob pattern into a regexp"
220 "convert a glob pattern into a regexp"
221 i, n = 0, len(pat)
221 i, n = 0, len(pat)
222 res = ''
222 res = ''
223 group = 0
223 group = 0
224 def peek(): return i < n and pat[i]
224 def peek(): return i < n and pat[i]
225 while i < n:
225 while i < n:
226 c = pat[i]
226 c = pat[i]
227 i = i+1
227 i = i+1
228 if c == '*':
228 if c == '*':
229 if peek() == '*':
229 if peek() == '*':
230 i += 1
230 i += 1
231 res += '.*'
231 res += '.*'
232 else:
232 else:
233 res += '[^/]*'
233 res += '[^/]*'
234 elif c == '?':
234 elif c == '?':
235 res += '.'
235 res += '.'
236 elif c == '[':
236 elif c == '[':
237 j = i
237 j = i
238 if j < n and pat[j] in '!]':
238 if j < n and pat[j] in '!]':
239 j += 1
239 j += 1
240 while j < n and pat[j] != ']':
240 while j < n and pat[j] != ']':
241 j += 1
241 j += 1
242 if j >= n:
242 if j >= n:
243 res += '\\['
243 res += '\\['
244 else:
244 else:
245 stuff = pat[i:j].replace('\\','\\\\')
245 stuff = pat[i:j].replace('\\','\\\\')
246 i = j + 1
246 i = j + 1
247 if stuff[0] == '!':
247 if stuff[0] == '!':
248 stuff = '^' + stuff[1:]
248 stuff = '^' + stuff[1:]
249 elif stuff[0] == '^':
249 elif stuff[0] == '^':
250 stuff = '\\' + stuff
250 stuff = '\\' + stuff
251 res = '%s[%s]' % (res, stuff)
251 res = '%s[%s]' % (res, stuff)
252 elif c == '{':
252 elif c == '{':
253 group += 1
253 group += 1
254 res += '(?:'
254 res += '(?:'
255 elif c == '}' and group:
255 elif c == '}' and group:
256 res += ')'
256 res += ')'
257 group -= 1
257 group -= 1
258 elif c == ',' and group:
258 elif c == ',' and group:
259 res += '|'
259 res += '|'
260 elif c == '\\':
260 elif c == '\\':
261 p = peek()
261 p = peek()
262 if p:
262 if p:
263 i += 1
263 i += 1
264 res += re.escape(p)
264 res += re.escape(p)
265 else:
265 else:
266 res += re.escape(c)
266 res += re.escape(c)
267 else:
267 else:
268 res += re.escape(c)
268 res += re.escape(c)
269 return head + res + tail
269 return head + res + tail
270
270
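A quick illustration of globre (re is already imported at the top of this module); a single '*' stays inside one path component while '**' may cross '/' separators:

    rx = re.compile(globre('*.py'))                  # roughly '^[^/]*\.py$'
    bool(rx.match('setup.py'))                       # True
    bool(rx.match('mercurial/util.py'))              # False: '*' stops at '/'
    bool(re.compile(globre('**/*.py')).match('mercurial/util.py'))   # True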
271 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
271 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
272
272
273 def pathto(root, n1, n2):
273 def pathto(root, n1, n2):
274 '''return the relative path from one place to another.
274 '''return the relative path from one place to another.
275 root should use os.sep to separate directories
275 root should use os.sep to separate directories
276 n1 should use os.sep to separate directories
276 n1 should use os.sep to separate directories
277 n2 should use "/" to separate directories
277 n2 should use "/" to separate directories
278 returns an os.sep-separated path.
278 returns an os.sep-separated path.
279
279
280 If n1 is a relative path, it's assumed it's
280 If n1 is a relative path, it's assumed it's
281 relative to root.
281 relative to root.
282 n2 should always be relative to root.
282 n2 should always be relative to root.
283 '''
283 '''
284 if not n1: return localpath(n2)
284 if not n1: return localpath(n2)
285 if os.path.isabs(n1):
285 if os.path.isabs(n1):
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
287 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
288 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
289 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
290 a.reverse()
290 a.reverse()
291 b.reverse()
291 b.reverse()
292 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
293 a.pop()
293 a.pop()
294 b.pop()
294 b.pop()
295 b.reverse()
295 b.reverse()
296 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
297
297
298 def canonpath(root, cwd, myname):
298 def canonpath(root, cwd, myname):
299 """return the canonical path of myname, given cwd and root"""
299 """return the canonical path of myname, given cwd and root"""
300 if root == os.sep:
300 if root == os.sep:
301 rootsep = os.sep
301 rootsep = os.sep
302 elif endswithsep(root):
302 elif endswithsep(root):
303 rootsep = root
303 rootsep = root
304 else:
304 else:
305 rootsep = root + os.sep
305 rootsep = root + os.sep
306 name = myname
306 name = myname
307 if not os.path.isabs(name):
307 if not os.path.isabs(name):
308 name = os.path.join(root, cwd, name)
308 name = os.path.join(root, cwd, name)
309 name = os.path.normpath(name)
309 name = os.path.normpath(name)
310 audit_path = path_auditor(root)
310 audit_path = path_auditor(root)
311 if name != rootsep and name.startswith(rootsep):
311 if name != rootsep and name.startswith(rootsep):
312 name = name[len(rootsep):]
312 name = name[len(rootsep):]
313 audit_path(name)
313 audit_path(name)
314 return pconvert(name)
314 return pconvert(name)
315 elif name == root:
315 elif name == root:
316 return ''
316 return ''
317 else:
317 else:
318 # Determine whether `name' is in the hierarchy at or beneath `root',
318 # Determine whether `name' is in the hierarchy at or beneath `root',
319 # by iterating name=dirname(name) until that causes no change (can't
319 # by iterating name=dirname(name) until that causes no change (can't
320 # check name == '/', because that doesn't work on windows). For each
320 # check name == '/', because that doesn't work on windows). For each
321 # `name', compare dev/inode numbers. If they match, the list `rel'
321 # `name', compare dev/inode numbers. If they match, the list `rel'
322 # holds the reversed list of components making up the relative file
322 # holds the reversed list of components making up the relative file
323 # name we want.
323 # name we want.
324 root_st = os.stat(root)
324 root_st = os.stat(root)
325 rel = []
325 rel = []
326 while True:
326 while True:
327 try:
327 try:
328 name_st = os.stat(name)
328 name_st = os.stat(name)
329 except OSError:
329 except OSError:
330 break
330 break
331 if samestat(name_st, root_st):
331 if samestat(name_st, root_st):
332 if not rel:
332 if not rel:
333 # name was actually the same as root (maybe a symlink)
333 # name was actually the same as root (maybe a symlink)
334 return ''
334 return ''
335 rel.reverse()
335 rel.reverse()
336 name = os.path.join(*rel)
336 name = os.path.join(*rel)
337 audit_path(name)
337 audit_path(name)
338 return pconvert(name)
338 return pconvert(name)
339 dirname, basename = os.path.split(name)
339 dirname, basename = os.path.split(name)
340 rel.append(basename)
340 rel.append(basename)
341 if dirname == name:
341 if dirname == name:
342 break
342 break
343 name = dirname
343 name = dirname
344
344
345 raise Abort('%s not under root' % myname)
345 raise Abort('%s not under root' % myname)
346
346
347 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
347 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
348 """build a function to match a set of file patterns
348 """build a function to match a set of file patterns
349
349
350 arguments:
350 arguments:
351 canonroot - the canonical root of the tree you're matching against
351 canonroot - the canonical root of the tree you're matching against
352 cwd - the current working directory, if relevant
352 cwd - the current working directory, if relevant
353 names - patterns to find
353 names - patterns to find
354 inc - patterns to include
354 inc - patterns to include
355 exc - patterns to exclude
355 exc - patterns to exclude
356 dflt_pat - if a pattern in names has no explicit type, assume this one
356 dflt_pat - if a pattern in names has no explicit type, assume this one
357 src - where these patterns came from (e.g. .hgignore)
357 src - where these patterns came from (e.g. .hgignore)
358
358
359 a pattern is one of:
359 a pattern is one of:
360 'glob:<glob>' - a glob relative to cwd
360 'glob:<glob>' - a glob relative to cwd
361 're:<regexp>' - a regular expression
361 're:<regexp>' - a regular expression
362 'path:<path>' - a path relative to canonroot
362 'path:<path>' - a path relative to canonroot
363 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
363 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
364 'relpath:<path>' - a path relative to cwd
364 'relpath:<path>' - a path relative to cwd
365 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
365 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
366 '<something>' - one of the cases above, selected by the dflt_pat argument
366 '<something>' - one of the cases above, selected by the dflt_pat argument
367
367
368 returns:
368 returns:
369 a 3-tuple containing
369 a 3-tuple containing
370 - list of roots (places where one should start a recursive walk of the fs);
370 - list of roots (places where one should start a recursive walk of the fs);
371 this often matches the explicit non-pattern names passed in, but also
371 this often matches the explicit non-pattern names passed in, but also
372 includes the initial part of glob: patterns that has no glob characters
372 includes the initial part of glob: patterns that has no glob characters
373 - a bool match(filename) function
373 - a bool match(filename) function
374 - a bool indicating if any patterns were passed in
374 - a bool indicating if any patterns were passed in
375 """
375 """
376
376
377 # a common case: no patterns at all
377 # a common case: no patterns at all
378 if not names and not inc and not exc:
378 if not names and not inc and not exc:
379 return [], always, False
379 return [], always, False
380
380
381 def contains_glob(name):
381 def contains_glob(name):
382 for c in name:
382 for c in name:
383 if c in _globchars: return True
383 if c in _globchars: return True
384 return False
384 return False
385
385
386 def regex(kind, name, tail):
386 def regex(kind, name, tail):
387 '''convert a pattern into a regular expression'''
387 '''convert a pattern into a regular expression'''
388 if not name:
388 if not name:
389 return ''
389 return ''
390 if kind == 're':
390 if kind == 're':
391 return name
391 return name
392 elif kind == 'path':
392 elif kind == 'path':
393 return '^' + re.escape(name) + '(?:/|$)'
393 return '^' + re.escape(name) + '(?:/|$)'
394 elif kind == 'relglob':
394 elif kind == 'relglob':
395 return globre(name, '(?:|.*/)', tail)
395 return globre(name, '(?:|.*/)', tail)
396 elif kind == 'relpath':
396 elif kind == 'relpath':
397 return re.escape(name) + '(?:/|$)'
397 return re.escape(name) + '(?:/|$)'
398 elif kind == 'relre':
398 elif kind == 'relre':
399 if name.startswith('^'):
399 if name.startswith('^'):
400 return name
400 return name
401 return '.*' + name
401 return '.*' + name
402 return globre(name, '', tail)
402 return globre(name, '', tail)
403
403
404 def matchfn(pats, tail):
404 def matchfn(pats, tail):
405 """build a matching function from a set of patterns"""
405 """build a matching function from a set of patterns"""
406 if not pats:
406 if not pats:
407 return
407 return
408 try:
408 try:
409 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
409 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
410 if len(pat) > 20000:
410 if len(pat) > 20000:
411 raise OverflowError()
411 raise OverflowError()
412 return re.compile(pat).match
412 return re.compile(pat).match
413 except OverflowError:
413 except OverflowError:
414 # We're using a Python with a tiny regex engine and we
414 # We're using a Python with a tiny regex engine and we
415 # made it explode, so we'll divide the pattern list in two
415 # made it explode, so we'll divide the pattern list in two
416 # until it works
416 # until it works
417 l = len(pats)
417 l = len(pats)
418 if l < 2:
418 if l < 2:
419 raise
419 raise
420 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
420 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
421 return lambda s: a(s) or b(s)
421 return lambda s: a(s) or b(s)
422 except re.error:
422 except re.error:
423 for k, p in pats:
423 for k, p in pats:
424 try:
424 try:
425 re.compile('(?:%s)' % regex(k, p, tail))
425 re.compile('(?:%s)' % regex(k, p, tail))
426 except re.error:
426 except re.error:
427 if src:
427 if src:
428 raise Abort("%s: invalid pattern (%s): %s" %
428 raise Abort("%s: invalid pattern (%s): %s" %
429 (src, k, p))
429 (src, k, p))
430 else:
430 else:
431 raise Abort("invalid pattern (%s): %s" % (k, p))
431 raise Abort("invalid pattern (%s): %s" % (k, p))
432 raise Abort("invalid pattern")
432 raise Abort("invalid pattern")
433
433
434 def globprefix(pat):
434 def globprefix(pat):
435 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
435 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
436 root = []
436 root = []
437 for p in pat.split('/'):
437 for p in pat.split('/'):
438 if contains_glob(p): break
438 if contains_glob(p): break
439 root.append(p)
439 root.append(p)
440 return '/'.join(root) or '.'
440 return '/'.join(root) or '.'
441
441
442 def normalizepats(names, default):
442 def normalizepats(names, default):
443 pats = []
443 pats = []
444 roots = []
444 roots = []
445 anypats = False
445 anypats = False
446 for kind, name in [patkind(p, default) for p in names]:
446 for kind, name in [patkind(p, default) for p in names]:
447 if kind in ('glob', 'relpath'):
447 if kind in ('glob', 'relpath'):
448 name = canonpath(canonroot, cwd, name)
448 name = canonpath(canonroot, cwd, name)
449 elif kind in ('relglob', 'path'):
449 elif kind in ('relglob', 'path'):
450 name = normpath(name)
450 name = normpath(name)
451
451
452 pats.append((kind, name))
452 pats.append((kind, name))
453
453
454 if kind in ('glob', 're', 'relglob', 'relre'):
454 if kind in ('glob', 're', 'relglob', 'relre'):
455 anypats = True
455 anypats = True
456
456
457 if kind == 'glob':
457 if kind == 'glob':
458 root = globprefix(name)
458 root = globprefix(name)
459 roots.append(root)
459 roots.append(root)
460 elif kind in ('relpath', 'path'):
460 elif kind in ('relpath', 'path'):
461 roots.append(name or '.')
461 roots.append(name or '.')
462 elif kind == 'relglob':
462 elif kind == 'relglob':
463 roots.append('.')
463 roots.append('.')
464 return roots, pats, anypats
464 return roots, pats, anypats
465
465
466 roots, pats, anypats = normalizepats(names, dflt_pat)
466 roots, pats, anypats = normalizepats(names, dflt_pat)
467
467
468 patmatch = matchfn(pats, '$') or always
468 patmatch = matchfn(pats, '$') or always
469 incmatch = always
469 incmatch = always
470 if inc:
470 if inc:
471 dummy, inckinds, dummy = normalizepats(inc, 'glob')
471 dummy, inckinds, dummy = normalizepats(inc, 'glob')
472 incmatch = matchfn(inckinds, '(?:/|$)')
472 incmatch = matchfn(inckinds, '(?:/|$)')
473 excmatch = never
473 excmatch = never
474 if exc:
474 if exc:
475 dummy, exckinds, dummy = normalizepats(exc, 'glob')
475 dummy, exckinds, dummy = normalizepats(exc, 'glob')
476 excmatch = matchfn(exckinds, '(?:/|$)')
476 excmatch = matchfn(exckinds, '(?:/|$)')
477
477
478 if not names and inc and not exc:
478 if not names and inc and not exc:
479 # common case: hgignore patterns
479 # common case: hgignore patterns
480 match = incmatch
480 match = incmatch
481 else:
481 else:
482 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
482 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
483
483
484 return (roots, match, (inc or exc or anypats) and True)
484 return (roots, match, (inc or exc or anypats) and True)
485
485
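A minimal usage sketch (the root and file names are made up); relglob patterns need no filesystem access, so the returned 3-tuple can be shown directly:

    roots, match, anypats = matcher('/repo', names=['relglob:*.c'])
    roots                        # ['.']  -- walk from the top
    anypats                      # True   -- a real pattern was supplied
    bool(match('lib/foo.c'))     # True   -- relglob matches in any directory
    bool(match('lib/foo.h'))     # False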
486 _hgexecutable = None
486 _hgexecutable = None
487
487
488 def main_is_frozen():
488 def main_is_frozen():
489 """return True if we are a frozen executable.
489 """return True if we are a frozen executable.
490
490
491 The code supports py2exe (most common, Windows only) and tools/freeze
491 The code supports py2exe (most common, Windows only) and tools/freeze
492 (portable, not much used).
492 (portable, not much used).
493 """
493 """
494 return (hasattr(sys, "frozen") or # new py2exe
494 return (hasattr(sys, "frozen") or # new py2exe
495 hasattr(sys, "importers") or # old py2exe
495 hasattr(sys, "importers") or # old py2exe
496 imp.is_frozen("__main__")) # tools/freeze
496 imp.is_frozen("__main__")) # tools/freeze
497
497
498 def hgexecutable():
498 def hgexecutable():
499 """return location of the 'hg' executable.
499 """return location of the 'hg' executable.
500
500
501 Defaults to $HG or 'hg' in the search path.
501 Defaults to $HG or 'hg' in the search path.
502 """
502 """
503 if _hgexecutable is None:
503 if _hgexecutable is None:
504 hg = os.environ.get('HG')
504 hg = os.environ.get('HG')
505 if hg:
505 if hg:
506 set_hgexecutable(hg)
506 set_hgexecutable(hg)
507 elif main_is_frozen():
507 elif main_is_frozen():
508 set_hgexecutable(sys.executable)
508 set_hgexecutable(sys.executable)
509 else:
509 else:
510 set_hgexecutable(find_exe('hg') or 'hg')
510 set_hgexecutable(find_exe('hg') or 'hg')
511 return _hgexecutable
511 return _hgexecutable
512
512
513 def set_hgexecutable(path):
513 def set_hgexecutable(path):
514 """set location of the 'hg' executable"""
514 """set location of the 'hg' executable"""
515 global _hgexecutable
515 global _hgexecutable
516 _hgexecutable = path
516 _hgexecutable = path
517
517
518 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
518 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
519 '''enhanced shell command execution.
519 '''enhanced shell command execution.
520 run with environment maybe modified, maybe in different dir.
520 run with environment maybe modified, maybe in different dir.
521
521
522 if command fails and onerr is None, return status. if ui object,
522 if command fails and onerr is None, return status. if ui object,
523 print error message and return status, else raise onerr object as
523 print error message and return status, else raise onerr object as
524 exception.'''
524 exception.'''
525 def py2shell(val):
525 def py2shell(val):
526 'convert python object into string that is useful to shell'
526 'convert python object into string that is useful to shell'
527 if val in (None, False):
527 if val in (None, False):
528 return '0'
528 return '0'
529 if val == True:
529 if val == True:
530 return '1'
530 return '1'
531 return str(val)
531 return str(val)
532 oldenv = {}
532 oldenv = {}
533 for k in environ:
533 for k in environ:
534 oldenv[k] = os.environ.get(k)
534 oldenv[k] = os.environ.get(k)
535 if cwd is not None:
535 if cwd is not None:
536 oldcwd = os.getcwd()
536 oldcwd = os.getcwd()
537 origcmd = cmd
537 origcmd = cmd
538 if os.name == 'nt':
538 if os.name == 'nt':
539 cmd = '"%s"' % cmd
539 cmd = '"%s"' % cmd
540 try:
540 try:
541 for k, v in environ.iteritems():
541 for k, v in environ.iteritems():
542 os.environ[k] = py2shell(v)
542 os.environ[k] = py2shell(v)
543 os.environ['HG'] = hgexecutable()
543 os.environ['HG'] = hgexecutable()
544 if cwd is not None and oldcwd != cwd:
544 if cwd is not None and oldcwd != cwd:
545 os.chdir(cwd)
545 os.chdir(cwd)
546 rc = os.system(cmd)
546 rc = os.system(cmd)
547 if sys.platform == 'OpenVMS' and rc & 1:
547 if sys.platform == 'OpenVMS' and rc & 1:
548 rc = 0
548 rc = 0
549 if rc and onerr:
549 if rc and onerr:
550 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
550 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
551 explain_exit(rc)[0])
551 explain_exit(rc)[0])
552 if errprefix:
552 if errprefix:
553 errmsg = '%s: %s' % (errprefix, errmsg)
553 errmsg = '%s: %s' % (errprefix, errmsg)
554 try:
554 try:
555 onerr.warn(errmsg + '\n')
555 onerr.warn(errmsg + '\n')
556 except AttributeError:
556 except AttributeError:
557 raise onerr(errmsg)
557 raise onerr(errmsg)
558 return rc
558 return rc
559 finally:
559 finally:
560 for k, v in oldenv.iteritems():
560 for k, v in oldenv.iteritems():
561 if v is None:
561 if v is None:
562 del os.environ[k]
562 del os.environ[k]
563 else:
563 else:
564 os.environ[k] = v
564 os.environ[k] = v
565 if cwd is not None and oldcwd != cwd:
565 if cwd is not None and oldcwd != cwd:
566 os.chdir(oldcwd)
566 os.chdir(oldcwd)
567
567
568 def checksignature(func):
568 def checksignature(func):
569 '''wrap a function with code to check for calling errors'''
569 '''wrap a function with code to check for calling errors'''
570 def check(*args, **kwargs):
570 def check(*args, **kwargs):
571 try:
571 try:
572 return func(*args, **kwargs)
572 return func(*args, **kwargs)
573 except TypeError:
573 except TypeError:
574 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
574 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
575 raise error.SignatureError
575 raise error.SignatureError
576 raise
576 raise
577
577
578 return check
578 return check
579
579
580 # os.path.lexists is not available on python2.3
580 # os.path.lexists is not available on python2.3
581 def lexists(filename):
581 def lexists(filename):
582 "test whether a file with this name exists. does not follow symlinks"
582 "test whether a file with this name exists. does not follow symlinks"
583 try:
583 try:
584 os.lstat(filename)
584 os.lstat(filename)
585 except:
585 except:
586 return False
586 return False
587 return True
587 return True
588
588
589 def rename(src, dst):
589 def rename(src, dst):
590 """forcibly rename a file"""
590 """forcibly rename a file"""
591 try:
591 try:
592 os.rename(src, dst)
592 os.rename(src, dst)
593 except OSError, err: # FIXME: check err (EEXIST ?)
593 except OSError, err: # FIXME: check err (EEXIST ?)
594
594
595 # On windows, rename to existing file is not allowed, so we
595 # On windows, rename to existing file is not allowed, so we
596 # must delete destination first. But if a file is open, unlink
596 # must delete destination first. But if a file is open, unlink
597 # schedules it for delete but does not delete it. Rename
597 # schedules it for delete but does not delete it. Rename
598 # happens immediately even for open files, so we rename
598 # happens immediately even for open files, so we rename
599 # destination to a temporary name, then delete that. Then
599 # destination to a temporary name, then delete that. Then
600 # rename is safe to do.
600 # rename is safe to do.
601 # The temporary name is chosen at random to avoid the situation
601 # The temporary name is chosen at random to avoid the situation
602 # where a file is left lying around from a previous aborted run.
602 # where a file is left lying around from a previous aborted run.
603 # The usual race condition this introduces can't be avoided as
603 # The usual race condition this introduces can't be avoided as
604 # we need the name to rename into, and not the file itself. Due
604 # we need the name to rename into, and not the file itself. Due
605 # to the nature of the operation however, any races will at worst
605 # to the nature of the operation however, any races will at worst
606 # lead to the rename failing and the current operation aborting.
606 # lead to the rename failing and the current operation aborting.
607
607
608 def tempname(prefix):
608 def tempname(prefix):
609 for tries in xrange(10):
609 for tries in xrange(10):
610 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
610 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
611 if not os.path.exists(temp):
611 if not os.path.exists(temp):
612 return temp
612 return temp
613 raise IOError, (errno.EEXIST, "No usable temporary filename found")
613 raise IOError, (errno.EEXIST, "No usable temporary filename found")
614
614
615 temp = tempname(dst)
615 temp = tempname(dst)
616 os.rename(dst, temp)
616 os.rename(dst, temp)
617 os.unlink(temp)
617 os.unlink(temp)
618 os.rename(src, dst)
618 os.rename(src, dst)
619
619
620 def unlink(f):
620 def unlink(f):
621 """unlink and remove the directory if it is empty"""
621 """unlink and remove the directory if it is empty"""
622 os.unlink(f)
622 os.unlink(f)
623 # try removing directories that might now be empty
623 # try removing directories that might now be empty
624 try:
624 try:
625 os.removedirs(os.path.dirname(f))
625 os.removedirs(os.path.dirname(f))
626 except OSError:
626 except OSError:
627 pass
627 pass
628
628
629 def copyfile(src, dest):
629 def copyfile(src, dest):
630 "copy a file, preserving mode and atime/mtime"
630 "copy a file, preserving mode and atime/mtime"
631 if os.path.islink(src):
631 if os.path.islink(src):
632 try:
632 try:
633 os.unlink(dest)
633 os.unlink(dest)
634 except:
634 except:
635 pass
635 pass
636 os.symlink(os.readlink(src), dest)
636 os.symlink(os.readlink(src), dest)
637 else:
637 else:
638 try:
638 try:
639 shutil.copyfile(src, dest)
639 shutil.copyfile(src, dest)
640 shutil.copystat(src, dest)
640 shutil.copystat(src, dest)
641 except shutil.Error, inst:
641 except shutil.Error, inst:
642 raise Abort(str(inst))
642 raise Abort(str(inst))
643
643
644 def copyfiles(src, dst, hardlink=None):
644 def copyfiles(src, dst, hardlink=None):
645 """Copy a directory tree using hardlinks if possible"""
645 """Copy a directory tree using hardlinks if possible"""
646
646
647 if hardlink is None:
647 if hardlink is None:
648 hardlink = (os.stat(src).st_dev ==
648 hardlink = (os.stat(src).st_dev ==
649 os.stat(os.path.dirname(dst)).st_dev)
649 os.stat(os.path.dirname(dst)).st_dev)
650
650
651 if os.path.isdir(src):
651 if os.path.isdir(src):
652 os.mkdir(dst)
652 os.mkdir(dst)
653 for name, kind in osutil.listdir(src):
653 for name, kind in osutil.listdir(src):
654 srcname = os.path.join(src, name)
654 srcname = os.path.join(src, name)
655 dstname = os.path.join(dst, name)
655 dstname = os.path.join(dst, name)
656 copyfiles(srcname, dstname, hardlink)
656 copyfiles(srcname, dstname, hardlink)
657 else:
657 else:
658 if hardlink:
658 if hardlink:
659 try:
659 try:
660 os_link(src, dst)
660 os_link(src, dst)
661 except (IOError, OSError):
661 except (IOError, OSError):
662 hardlink = False
662 hardlink = False
663 shutil.copy(src, dst)
663 shutil.copy(src, dst)
664 else:
664 else:
665 shutil.copy(src, dst)
665 shutil.copy(src, dst)
666
666
667 class path_auditor(object):
667 class path_auditor(object):
668 '''ensure that a filesystem path contains no banned components.
668 '''ensure that a filesystem path contains no banned components.
669 the following properties of a path are checked:
669 the following properties of a path are checked:
670
670
671 - under top-level .hg
671 - under top-level .hg
672 - starts at the root of a windows drive
672 - starts at the root of a windows drive
673 - contains ".."
673 - contains ".."
674 - traverses a symlink (e.g. a/symlink_here/b)
674 - traverses a symlink (e.g. a/symlink_here/b)
675 - inside a nested repository'''
675 - inside a nested repository'''
676
676
677 def __init__(self, root):
677 def __init__(self, root):
678 self.audited = set()
678 self.audited = set()
679 self.auditeddir = set()
679 self.auditeddir = set()
680 self.root = root
680 self.root = root
681
681
682 def __call__(self, path):
682 def __call__(self, path):
683 if path in self.audited:
683 if path in self.audited:
684 return
684 return
685 normpath = os.path.normcase(path)
685 normpath = os.path.normcase(path)
686 parts = splitpath(normpath)
686 parts = splitpath(normpath)
687 if (os.path.splitdrive(path)[0]
687 if (os.path.splitdrive(path)[0]
688 or parts[0].lower() in ('.hg', '.hg.', '')
688 or parts[0].lower() in ('.hg', '.hg.', '')
689 or os.pardir in parts):
689 or os.pardir in parts):
690 raise Abort(_("path contains illegal component: %s") % path)
690 raise Abort(_("path contains illegal component: %s") % path)
691 if '.hg' in path.lower():
691 if '.hg' in path.lower():
692 lparts = [p.lower() for p in parts]
692 lparts = [p.lower() for p in parts]
693 for p in '.hg', '.hg.':
693 for p in '.hg', '.hg.':
694 if p in lparts[1:]:
694 if p in lparts[1:]:
695 pos = lparts.index(p)
695 pos = lparts.index(p)
696 base = os.path.join(*parts[:pos])
696 base = os.path.join(*parts[:pos])
697 raise Abort(_('path %r is inside repo %r') % (path, base))
697 raise Abort(_('path %r is inside repo %r') % (path, base))
698 def check(prefix):
698 def check(prefix):
699 curpath = os.path.join(self.root, prefix)
699 curpath = os.path.join(self.root, prefix)
700 try:
700 try:
701 st = os.lstat(curpath)
701 st = os.lstat(curpath)
702 except OSError, err:
702 except OSError, err:
703 # EINVAL can be raised as invalid path syntax under win32.
703 # EINVAL can be raised as invalid path syntax under win32.
704 # They must be ignored for patterns can be checked too.
704 # They must be ignored for patterns can be checked too.
705 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
705 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
706 raise
706 raise
707 else:
707 else:
708 if stat.S_ISLNK(st.st_mode):
708 if stat.S_ISLNK(st.st_mode):
709 raise Abort(_('path %r traverses symbolic link %r') %
709 raise Abort(_('path %r traverses symbolic link %r') %
710 (path, prefix))
710 (path, prefix))
711 elif (stat.S_ISDIR(st.st_mode) and
711 elif (stat.S_ISDIR(st.st_mode) and
712 os.path.isdir(os.path.join(curpath, '.hg'))):
712 os.path.isdir(os.path.join(curpath, '.hg'))):
713 raise Abort(_('path %r is inside repo %r') %
713 raise Abort(_('path %r is inside repo %r') %
714 (path, prefix))
714 (path, prefix))
715 parts.pop()
715 parts.pop()
716 prefixes = []
716 prefixes = []
717 for n in range(len(parts)):
717 for n in range(len(parts)):
718 prefix = os.sep.join(parts)
718 prefix = os.sep.join(parts)
719 if prefix in self.auditeddir:
719 if prefix in self.auditeddir:
720 break
720 break
721 check(prefix)
721 check(prefix)
722 prefixes.append(prefix)
722 prefixes.append(prefix)
723 parts.pop()
723 parts.pop()
724
724
725 self.audited.add(path)
725 self.audited.add(path)
726 # only add prefixes to the cache after checking everything: we don't
726 # only add prefixes to the cache after checking everything: we don't
727 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
727 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
728 self.auditeddir.update(prefixes)
728 self.auditeddir.update(prefixes)
729
729
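A usage sketch with made-up POSIX paths; the second call aborts because the first path component is the repository's own .hg directory:

    audit = path_auditor('/repo')
    audit('foo/bar.txt')     # passes, and 'foo/bar.txt' is cached in audit.audited
    audit('.hg/hgrc')        # raises Abort: path contains illegal component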
730 def nlinks(pathname):
730 def nlinks(pathname):
731 """Return number of hardlinks for the given file."""
731 """Return number of hardlinks for the given file."""
732 return os.lstat(pathname).st_nlink
732 return os.lstat(pathname).st_nlink
733
733
734 if hasattr(os, 'link'):
734 if hasattr(os, 'link'):
735 os_link = os.link
735 os_link = os.link
736 else:
736 else:
737 def os_link(src, dst):
737 def os_link(src, dst):
738 raise OSError(0, _("Hardlinks not supported"))
738 raise OSError(0, _("Hardlinks not supported"))
739
739
740 def lookup_reg(key, name=None, scope=None):
740 def lookup_reg(key, name=None, scope=None):
741 return None
741 return None
742
742
743 if os.name == 'nt':
743 if os.name == 'nt':
744 from windows import *
744 from windows import *
745 def expand_glob(pats):
745 def expand_glob(pats):
746 '''On Windows, expand the implicit globs in a list of patterns'''
746 '''On Windows, expand the implicit globs in a list of patterns'''
747 ret = []
747 ret = []
748 for p in pats:
748 for p in pats:
749 kind, name = patkind(p, None)
749 kind, name = patkind(p, None)
750 if kind is None:
750 if kind is None:
751 globbed = glob.glob(name)
751 globbed = glob.glob(name)
752 if globbed:
752 if globbed:
753 ret.extend(globbed)
753 ret.extend(globbed)
754 continue
754 continue
755 # if we couldn't expand the glob, just keep it around
755 # if we couldn't expand the glob, just keep it around
756 ret.append(p)
756 ret.append(p)
757 return ret
757 return ret
758 else:
758 else:
759 from posix import *
759 from posix import *
760
760
761 def makelock(info, pathname):
761 def makelock(info, pathname):
762 try:
762 try:
763 return os.symlink(info, pathname)
763 return os.symlink(info, pathname)
764 except OSError, why:
764 except OSError, why:
765 if why.errno == errno.EEXIST:
765 if why.errno == errno.EEXIST:
766 raise
766 raise
767 except AttributeError: # no symlink in os
767 except AttributeError: # no symlink in os
768 pass
768 pass
769
769
770 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
770 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
771 os.write(ld, info)
771 os.write(ld, info)
772 os.close(ld)
772 os.close(ld)
773
773
774 def readlock(pathname):
774 def readlock(pathname):
775 try:
775 try:
776 return os.readlink(pathname)
776 return os.readlink(pathname)
777 except OSError, why:
777 except OSError, why:
778 if why.errno not in (errno.EINVAL, errno.ENOSYS):
778 if why.errno not in (errno.EINVAL, errno.ENOSYS):
779 raise
779 raise
780 except AttributeError: # no symlink in os
780 except AttributeError: # no symlink in os
781 pass
781 pass
782 return posixfile(pathname).read()
782 return posixfile(pathname).read()
783
783
784 def fstat(fp):
784 def fstat(fp):
785 '''stat file object that may not have fileno method.'''
785 '''stat file object that may not have fileno method.'''
786 try:
786 try:
787 return os.fstat(fp.fileno())
787 return os.fstat(fp.fileno())
788 except AttributeError:
788 except AttributeError:
789 return os.stat(fp.name)
789 return os.stat(fp.name)
790
790
791 # File system features
791 # File system features
792
792
793 def checkcase(path):
793 def checkcase(path):
794 """
794 """
795 Check whether the given path is on a case-sensitive filesystem
795 Check whether the given path is on a case-sensitive filesystem
796
796
797 Requires a path (like /foo/.hg) ending with a foldable final
797 Requires a path (like /foo/.hg) ending with a foldable final
798 directory component.
798 directory component.
799 """
799 """
800 s1 = os.stat(path)
800 s1 = os.stat(path)
801 d, b = os.path.split(path)
801 d, b = os.path.split(path)
802 p2 = os.path.join(d, b.upper())
802 p2 = os.path.join(d, b.upper())
803 if path == p2:
803 if path == p2:
804 p2 = os.path.join(d, b.lower())
804 p2 = os.path.join(d, b.lower())
805 try:
805 try:
806 s2 = os.stat(p2)
806 s2 = os.stat(p2)
807 if s2 == s1:
807 if s2 == s1:
808 return False
808 return False
809 return True
809 return True
810 except:
810 except:
811 return True
811 return True
812
812
813 _fspathcache = {}
813 _fspathcache = {}
814 def fspath(name, root):
814 def fspath(name, root):
815 '''Get name in the case stored in the filesystem
815 '''Get name in the case stored in the filesystem
816
816
817 The name is either relative to root, or it is an absolute path starting
817 The name is either relative to root, or it is an absolute path starting
818 with root. Note that this function is unnecessary, and should not be
818 with root. Note that this function is unnecessary, and should not be
819 called, for case-sensitive filesystems (simply because it's expensive).
819 called, for case-sensitive filesystems (simply because it's expensive).
820 '''
820 '''
821 # If name is absolute, make it relative
821 # If name is absolute, make it relative
822 if name.lower().startswith(root.lower()):
822 if name.lower().startswith(root.lower()):
823 l = len(root)
823 l = len(root)
824 if name[l] == os.sep or name[l] == os.altsep:
824 if name[l] == os.sep or name[l] == os.altsep:
825 l = l + 1
825 l = l + 1
826 name = name[l:]
826 name = name[l:]
827
827
828 if not os.path.exists(os.path.join(root, name)):
828 if not os.path.exists(os.path.join(root, name)):
829 return None
829 return None
830
830
831 seps = os.sep
831 seps = os.sep
832 if os.altsep:
832 if os.altsep:
833 seps = seps + os.altsep
833 seps = seps + os.altsep
834 # Protect backslashes. This gets silly very quickly.
834 # Protect backslashes. This gets silly very quickly.
835 seps.replace('\\','\\\\')
835 seps.replace('\\','\\\\')
836 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
836 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
837 dir = os.path.normcase(os.path.normpath(root))
837 dir = os.path.normcase(os.path.normpath(root))
838 result = []
838 result = []
839 for part, sep in pattern.findall(name):
839 for part, sep in pattern.findall(name):
840 if sep:
840 if sep:
841 result.append(sep)
841 result.append(sep)
842 continue
842 continue
843
843
844 if dir not in _fspathcache:
844 if dir not in _fspathcache:
845 _fspathcache[dir] = os.listdir(dir)
845 _fspathcache[dir] = os.listdir(dir)
846 contents = _fspathcache[dir]
846 contents = _fspathcache[dir]
847
847
848 lpart = part.lower()
848 lpart = part.lower()
849 for n in contents:
849 for n in contents:
850 if n.lower() == lpart:
850 if n.lower() == lpart:
851 result.append(n)
851 result.append(n)
852 break
852 break
853 else:
853 else:
854 # Cannot happen, as the file exists!
854 # Cannot happen, as the file exists!
855 result.append(part)
855 result.append(part)
856 dir = os.path.join(dir, lpart)
856 dir = os.path.join(dir, lpart)
857
857
858 return ''.join(result)
858 return ''.join(result)
859
859
860 def checkexec(path):
860 def checkexec(path):
861 """
861 """
862 Check whether the given path is on a filesystem with UNIX-like exec flags
862 Check whether the given path is on a filesystem with UNIX-like exec flags
863
863
864 Requires a directory (like /foo/.hg)
864 Requires a directory (like /foo/.hg)
865 """
865 """
866
866
867 # VFAT on some Linux versions can flip mode but it doesn't persist
867 # VFAT on some Linux versions can flip mode but it doesn't persist
868 # a FS remount. Frequently we can detect it if files are created
868 # a FS remount. Frequently we can detect it if files are created
869 # with exec bit on.
869 # with exec bit on.
870
870
871 try:
871 try:
872 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
872 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
873 fh, fn = tempfile.mkstemp("", "", path)
873 fh, fn = tempfile.mkstemp("", "", path)
874 try:
874 try:
875 os.close(fh)
875 os.close(fh)
876 m = os.stat(fn).st_mode & 0777
876 m = os.stat(fn).st_mode & 0777
877 new_file_has_exec = m & EXECFLAGS
877 new_file_has_exec = m & EXECFLAGS
878 os.chmod(fn, m ^ EXECFLAGS)
878 os.chmod(fn, m ^ EXECFLAGS)
879 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
879 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
880 finally:
880 finally:
881 os.unlink(fn)
881 os.unlink(fn)
882 except (IOError, OSError):
882 except (IOError, OSError):
883 # we don't care, the user probably won't be able to commit anyway
883 # we don't care, the user probably won't be able to commit anyway
884 return False
884 return False
885 return not (new_file_has_exec or exec_flags_cannot_flip)
885 return not (new_file_has_exec or exec_flags_cannot_flip)
886
886
887 def checklink(path):
887 def checklink(path):
888 """check whether the given path is on a symlink-capable filesystem"""
888 """check whether the given path is on a symlink-capable filesystem"""
889 # mktemp is not racy because symlink creation will fail if the
889 # mktemp is not racy because symlink creation will fail if the
890 # file already exists
890 # file already exists
891 name = tempfile.mktemp(dir=path)
891 name = tempfile.mktemp(dir=path)
892 try:
892 try:
893 os.symlink(".", name)
893 os.symlink(".", name)
894 os.unlink(name)
894 os.unlink(name)
895 return True
895 return True
896 except (OSError, AttributeError):
896 except (OSError, AttributeError):
897 return False
897 return False
898
898
899 def needbinarypatch():
899 def needbinarypatch():
900 """return True if patches should be applied in binary mode by default."""
900 """return True if patches should be applied in binary mode by default."""
901 return os.name == 'nt'
901 return os.name == 'nt'
902
902
903 def endswithsep(path):
903 def endswithsep(path):
904 '''Check path ends with os.sep or os.altsep.'''
904 '''Check path ends with os.sep or os.altsep.'''
905 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
905 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
906
906
907 def splitpath(path):
907 def splitpath(path):
908 '''Split path by os.sep.
908 '''Split path by os.sep.
909 Note that this function does not use os.altsep because this is
909 Note that this function does not use os.altsep because this is
910 an alternative of simple "xxx.split(os.sep)".
910 an alternative of simple "xxx.split(os.sep)".
911 It is recommended to use os.path.normpath() before using this
911 It is recommended to use os.path.normpath() before using this
912 function if need.'''
912 function if need.'''
913 return path.split(os.sep)
913 return path.split(os.sep)
914
914
915 def gui():
915 def gui():
916 '''Are we running in a GUI?'''
916 '''Are we running in a GUI?'''
917 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
917 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
918
918
919 def mktempcopy(name, emptyok=False, createmode=None):
919 def mktempcopy(name, emptyok=False, createmode=None):
920 """Create a temporary file with the same contents from name
920 """Create a temporary file with the same contents from name
921
921
922 The permission bits are copied from the original file.
922 The permission bits are copied from the original file.
923
923
924 If the temporary file is going to be truncated immediately, you
924 If the temporary file is going to be truncated immediately, you
925 can use emptyok=True as an optimization.
925 can use emptyok=True as an optimization.
926
926
927 Returns the name of the temporary file.
927 Returns the name of the temporary file.
928 """
928 """
929 d, fn = os.path.split(name)
929 d, fn = os.path.split(name)
930 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
930 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
931 os.close(fd)
931 os.close(fd)
932 # Temporary files are created with mode 0600, which is usually not
932 # Temporary files are created with mode 0600, which is usually not
933 # what we want. If the original file already exists, just copy
933 # what we want. If the original file already exists, just copy
934 # its mode. Otherwise, manually obey umask.
934 # its mode. Otherwise, manually obey umask.
935 try:
935 try:
936 st_mode = os.lstat(name).st_mode & 0777
936 st_mode = os.lstat(name).st_mode & 0777
937 except OSError, inst:
937 except OSError, inst:
938 if inst.errno != errno.ENOENT:
938 if inst.errno != errno.ENOENT:
939 raise
939 raise
940 st_mode = createmode
940 st_mode = createmode
941 if st_mode is None:
941 if st_mode is None:
942 st_mode = ~umask
942 st_mode = ~umask
943 st_mode &= 0666
943 st_mode &= 0666
944 os.chmod(temp, st_mode)
944 os.chmod(temp, st_mode)
945 if emptyok:
945 if emptyok:
946 return temp
946 return temp
947 try:
947 try:
948 try:
948 try:
949 ifp = posixfile(name, "rb")
949 ifp = posixfile(name, "rb")
950 except IOError, inst:
950 except IOError, inst:
951 if inst.errno == errno.ENOENT:
951 if inst.errno == errno.ENOENT:
952 return temp
952 return temp
953 if not getattr(inst, 'filename', None):
953 if not getattr(inst, 'filename', None):
954 inst.filename = name
954 inst.filename = name
955 raise
955 raise
956 ofp = posixfile(temp, "wb")
956 ofp = posixfile(temp, "wb")
957 for chunk in filechunkiter(ifp):
957 for chunk in filechunkiter(ifp):
958 ofp.write(chunk)
958 ofp.write(chunk)
959 ifp.close()
959 ifp.close()
960 ofp.close()
960 ofp.close()
961 except:
961 except:
962 try: os.unlink(temp)
962 try: os.unlink(temp)
963 except: pass
963 except: pass
964 raise
964 raise
965 return temp
965 return temp
966
966
967 class atomictempfile(posixfile):
967 class atomictempfile:
968 """file-like object that atomically updates a file
968 """file-like object that atomically updates a file
969
969
970 All writes will be redirected to a temporary copy of the original
970 All writes will be redirected to a temporary copy of the original
971 file. When rename is called, the copy is renamed to the original
971 file. When rename is called, the copy is renamed to the original
972 name, making the changes visible.
972 name, making the changes visible.
973 """
973 """
974 def __init__(self, name, mode, createmode):
974 def __init__(self, name, mode, createmode):
975 self.__name = name
975 self.__name = name
976 self.temp = mktempcopy(name, emptyok=('w' in mode),
976 self.temp = mktempcopy(name, emptyok=('w' in mode),
977 createmode=createmode)
977 createmode=createmode)
978 posixfile.__init__(self, self.temp, mode)
978 self._fp = posixfile(self.temp, mode)
979
980 def __getattr__(self, name):
981 return getattr(self._fp, name)
979
982
980 def rename(self):
983 def rename(self):
981 if not self.closed:
984 if not self.closed:
982 posixfile.close(self)
985 self._fp.close()
983 rename(self.temp, localpath(self.__name))
986 rename(self.temp, localpath(self.__name))
984
987
985 def __del__(self):
988 def __del__(self):
986 if not self.closed:
989 if not self.closed:
987 try:
990 try:
988 os.unlink(self.temp)
991 os.unlink(self.temp)
989 except: pass
992 except: pass
990 posixfile.close(self)
993 self._fp.close()
991
994
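A minimal usage sketch for the delegating atomictempfile above (the file name is made up); writes are forwarded to the temporary copy via __getattr__ and only become visible when rename() is called:

    f = atomictempfile('/srv/data/example.cfg', 'w', createmode=None)
    f.write('key = value\n')    # forwarded to the underlying posixfile
    f.rename()                  # close the copy, then rename it over the original

If rename() is never called, __del__ unlinks the temporary file, so an aborted write leaves the original untouched.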
992 def makedirs(name, mode=None):
995 def makedirs(name, mode=None):
993 """recursive directory creation with parent mode inheritance"""
996 """recursive directory creation with parent mode inheritance"""
994 try:
997 try:
995 os.mkdir(name)
998 os.mkdir(name)
996 if mode is not None:
999 if mode is not None:
997 os.chmod(name, mode)
1000 os.chmod(name, mode)
998 return
1001 return
999 except OSError, err:
1002 except OSError, err:
1000 if err.errno == errno.EEXIST:
1003 if err.errno == errno.EEXIST:
1001 return
1004 return
1002 if err.errno != errno.ENOENT:
1005 if err.errno != errno.ENOENT:
1003 raise
1006 raise
1004 parent = os.path.abspath(os.path.dirname(name))
1007 parent = os.path.abspath(os.path.dirname(name))
1005 makedirs(parent, mode)
1008 makedirs(parent, mode)
1006 makedirs(name, mode)
1009 makedirs(name, mode)
1007
1010
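For illustration (hypothetical path), makedirs recurses on ENOENT to create missing ancestors first and applies mode to each directory it creates:

    makedirs('/srv/data/a/b/c', mode=0755)   # creates any missing ancestors, then the leaf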
1008 class opener(object):
1011 class opener(object):
1009 """Open files relative to a base directory
1012 """Open files relative to a base directory
1010
1013
1011 This class is used to hide the details of COW semantics and
1014 This class is used to hide the details of COW semantics and
1012 remote file access from higher level code.
1015 remote file access from higher level code.
1013 """
1016 """
1014 def __init__(self, base, audit=True):
1017 def __init__(self, base, audit=True):
1015 self.base = base
1018 self.base = base
1016 if audit:
1019 if audit:
1017 self.audit_path = path_auditor(base)
1020 self.audit_path = path_auditor(base)
1018 else:
1021 else:
1019 self.audit_path = always
1022 self.audit_path = always
1020 self.createmode = None
1023 self.createmode = None
1021
1024
1022 def __getattr__(self, name):
1025 def __getattr__(self, name):
1023 if name == '_can_symlink':
1026 if name == '_can_symlink':
1024 self._can_symlink = checklink(self.base)
1027 self._can_symlink = checklink(self.base)
1025 return self._can_symlink
1028 return self._can_symlink
1026 raise AttributeError(name)
1029 raise AttributeError(name)
1027
1030
1028 def _fixfilemode(self, name):
1031 def _fixfilemode(self, name):
1029 if self.createmode is None:
1032 if self.createmode is None:
1030 return
1033 return
1031 os.chmod(name, self.createmode & 0666)
1034 os.chmod(name, self.createmode & 0666)
1032
1035
1033 def __call__(self, path, mode="r", text=False, atomictemp=False):
1036 def __call__(self, path, mode="r", text=False, atomictemp=False):
1034 self.audit_path(path)
1037 self.audit_path(path)
1035 f = os.path.join(self.base, path)
1038 f = os.path.join(self.base, path)
1036
1039
1037 if not text and "b" not in mode:
1040 if not text and "b" not in mode:
1038 mode += "b" # for that other OS
1041 mode += "b" # for that other OS
1039
1042
1040 nlink = -1
1043 nlink = -1
1041 if mode not in ("r", "rb"):
1044 if mode not in ("r", "rb"):
1042 try:
1045 try:
1043 nlink = nlinks(f)
1046 nlink = nlinks(f)
1044 except OSError:
1047 except OSError:
1045 nlink = 0
1048 nlink = 0
1046 d = os.path.dirname(f)
1049 d = os.path.dirname(f)
1047 if not os.path.isdir(d):
1050 if not os.path.isdir(d):
1048 makedirs(d, self.createmode)
1051 makedirs(d, self.createmode)
1049 if atomictemp:
1052 if atomictemp:
1050 return atomictempfile(f, mode, self.createmode)
1053 return atomictempfile(f, mode, self.createmode)
1051 if nlink > 1:
1054 if nlink > 1:
1052 rename(mktempcopy(f), f)
1055 rename(mktempcopy(f), f)
1053 fp = posixfile(f, mode)
1056 fp = posixfile(f, mode)
1054 if nlink == 0:
1057 if nlink == 0:
1055 self._fixfilemode(f)
1058 self._fixfilemode(f)
1056 return fp
1059 return fp
1057
1060
1058 def symlink(self, src, dst):
1061 def symlink(self, src, dst):
1059 self.audit_path(dst)
1062 self.audit_path(dst)
1060 linkname = os.path.join(self.base, dst)
1063 linkname = os.path.join(self.base, dst)
1061 try:
1064 try:
1062 os.unlink(linkname)
1065 os.unlink(linkname)
1063 except OSError:
1066 except OSError:
1064 pass
1067 pass
1065
1068
1066 dirname = os.path.dirname(linkname)
1069 dirname = os.path.dirname(linkname)
1067 if not os.path.exists(dirname):
1070 if not os.path.exists(dirname):
1068 makedirs(dirname, self.createmode)
1071 makedirs(dirname, self.createmode)
1069
1072
1070 if self._can_symlink:
1073 if self._can_symlink:
1071 try:
1074 try:
1072 os.symlink(src, linkname)
1075 os.symlink(src, linkname)
1073 except OSError, err:
1076 except OSError, err:
1074 raise OSError(err.errno, _('could not symlink to %r: %s') %
1077 raise OSError(err.errno, _('could not symlink to %r: %s') %
1075 (src, err.strerror), linkname)
1078 (src, err.strerror), linkname)
1076 else:
1079 else:
1077 f = self(dst, "w")
1080 f = self(dst, "w")
1078 f.write(src)
1081 f.write(src)
1079 f.close()
1082 f.close()
1080 self._fixfilemode(dst)
1083 self._fixfilemode(dst)
1081
1084
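A short sketch of how higher-level code might use opener (the base directory and file name are hypothetical); missing parent directories are created on write, and atomictemp=True returns the atomictempfile defined above:

    op = opener('/srv/data/base')                    # paths are audited relative to this base
    f = op('notes/draft.txt', 'w', atomictemp=True)
    f.write('draft\n')                               # buffered in a temporary file
    f.rename()                                       # atomically replaces notes/draft.txt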
1082 class chunkbuffer(object):
1085 class chunkbuffer(object):
1083 """Allow arbitrary sized chunks of data to be efficiently read from an
1086 """Allow arbitrary sized chunks of data to be efficiently read from an
1084 iterator over chunks of arbitrary size."""
1087 iterator over chunks of arbitrary size."""
1085
1088
1086 def __init__(self, in_iter):
1089 def __init__(self, in_iter):
1087 """in_iter is the iterator that's iterating over the input chunks.
1090 """in_iter is the iterator that's iterating over the input chunks.
1088 targetsize is how big a buffer to try to maintain."""
1091 targetsize is how big a buffer to try to maintain."""
1089 self.iter = iter(in_iter)
1092 self.iter = iter(in_iter)
1090 self.buf = ''
1093 self.buf = ''
1091 self.targetsize = 2**16
1094 self.targetsize = 2**16
1092
1095
1093 def read(self, l):
1096 def read(self, l):
1094 """Read L bytes of data from the iterator of chunks of data.
1097 """Read L bytes of data from the iterator of chunks of data.
1095 Returns less than L bytes if the iterator runs dry."""
1098 Returns less than L bytes if the iterator runs dry."""
1096 if l > len(self.buf) and self.iter:
1099 if l > len(self.buf) and self.iter:
1100 # gather at least max(l, self.targetsize) bytes from the iterator
1103 # gather at least max(l, self.targetsize) bytes from the iterator
1098 targetsize = max(l, self.targetsize)
1101 targetsize = max(l, self.targetsize)
1099 collector = cStringIO.StringIO()
1102 collector = cStringIO.StringIO()
1100 collector.write(self.buf)
1103 collector.write(self.buf)
1101 collected = len(self.buf)
1104 collected = len(self.buf)
1102 for chunk in self.iter:
1105 for chunk in self.iter:
1103 collector.write(chunk)
1106 collector.write(chunk)
1104 collected += len(chunk)
1107 collected += len(chunk)
1105 if collected >= targetsize:
1108 if collected >= targetsize:
1106 break
1109 break
1107 if collected < targetsize:
1110 if collected < targetsize:
1108 self.iter = False
1111 self.iter = False
1109 self.buf = collector.getvalue()
1112 self.buf = collector.getvalue()
1110 if len(self.buf) == l:
1113 if len(self.buf) == l:
1111 s, self.buf = str(self.buf), ''
1114 s, self.buf = str(self.buf), ''
1112 else:
1115 else:
1113 s, self.buf = self.buf[:l], buffer(self.buf, l)
1116 s, self.buf = self.buf[:l], buffer(self.buf, l)
1114 return s
1117 return s
1115
1118
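Sketch: chunkbuffer lets read() pull fixed amounts from an iterator of unevenly sized chunks:

    buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    buf.read(4)    # 'abcd'
    buf.read(6)    # 'efghij'
    buf.read(1)    # '' once the underlying iterator is exhausted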
1116 def filechunkiter(f, size=65536, limit=None):
1119 def filechunkiter(f, size=65536, limit=None):
1117 """Create a generator that produces the data in the file size
1120 """Create a generator that produces the data in the file size
1118 (default 65536) bytes at a time, up to optional limit (default is
1121 (default 65536) bytes at a time, up to optional limit (default is
1119 to read all data). Chunks may be less than size bytes if the
1122 to read all data). Chunks may be less than size bytes if the
1120 chunk is the last chunk in the file, or the file is a socket or
1123 chunk is the last chunk in the file, or the file is a socket or
1121 some other type of file that sometimes reads less data than is
1124 some other type of file that sometimes reads less data than is
1122 requested."""
1125 requested."""
1123 assert size >= 0
1126 assert size >= 0
1124 assert limit is None or limit >= 0
1127 assert limit is None or limit >= 0
1125 while True:
1128 while True:
1126 if limit is None: nbytes = size
1129 if limit is None: nbytes = size
1127 else: nbytes = min(limit, size)
1130 else: nbytes = min(limit, size)
1128 s = nbytes and f.read(nbytes)
1131 s = nbytes and f.read(nbytes)
1129 if not s: break
1132 if not s: break
1130 if limit: limit -= len(s)
1133 if limit: limit -= len(s)
1131 yield s
1134 yield s
1132
1135
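Sketch (hypothetical file name): read a file in bounded pieces, stopping after an optional byte limit:

    fp = open('/srv/data/big.bin', 'rb')
    read = 0
    for chunk in filechunkiter(fp, size=8192, limit=1 << 20):
        read += len(chunk)      # at most 1 MB total, in 8 kB pieces
    fp.close()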
1133 def makedate():
1136 def makedate():
1134 lt = time.localtime()
1137 lt = time.localtime()
1135 if lt[8] == 1 and time.daylight:
1138 if lt[8] == 1 and time.daylight:
1136 tz = time.altzone
1139 tz = time.altzone
1137 else:
1140 else:
1138 tz = time.timezone
1141 tz = time.timezone
1139 return time.mktime(lt), tz
1142 return time.mktime(lt), tz
1140
1143
1141 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1144 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1142 """represent a (unixtime, offset) tuple as a localized time.
1145 """represent a (unixtime, offset) tuple as a localized time.
1143 unixtime is seconds since the epoch, and offset is the time zone's
1146 unixtime is seconds since the epoch, and offset is the time zone's
1144 number of seconds away from UTC. if timezone is false, do not
1147 number of seconds away from UTC. if timezone is false, do not
1145 append time zone to string."""
1148 append time zone to string."""
1146 t, tz = date or makedate()
1149 t, tz = date or makedate()
1147 if "%1" in format or "%2" in format:
1150 if "%1" in format or "%2" in format:
1148 sign = (tz > 0) and "-" or "+"
1151 sign = (tz > 0) and "-" or "+"
1149 minutes = abs(tz) / 60
1152 minutes = abs(tz) / 60
1150 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1153 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1151 format = format.replace("%2", "%02d" % (minutes % 60))
1154 format = format.replace("%2", "%02d" % (minutes % 60))
1152 s = time.strftime(format, time.gmtime(float(t) - tz))
1155 s = time.strftime(format, time.gmtime(float(t) - tz))
1153 return s
1156 return s
1154
1157
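For illustration of the %1/%2 placeholders (note that negative offsets mean east of UTC, since unixtime = localunixtime + offset):

    datestr((0, 0))                     # 'Thu Jan 01 00:00:00 1970 +0000'
    datestr((0, -3600))                 # 'Thu Jan 01 01:00:00 1970 +0100'
    datestr((0, 0), format='%Y-%m-%d')  # '1970-01-01', as shortdate() below does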
1155 def shortdate(date=None):
1158 def shortdate(date=None):
1156 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1159 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1157 return datestr(date, format='%Y-%m-%d')
1160 return datestr(date, format='%Y-%m-%d')
1158
1161
1159 def strdate(string, format, defaults=[]):
1162 def strdate(string, format, defaults=[]):
1160 """parse a localized time string and return a (unixtime, offset) tuple.
1163 """parse a localized time string and return a (unixtime, offset) tuple.
1161 if the string cannot be parsed, ValueError is raised."""
1164 if the string cannot be parsed, ValueError is raised."""
1162 def timezone(string):
1165 def timezone(string):
1163 tz = string.split()[-1]
1166 tz = string.split()[-1]
1164 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1167 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1165 sign = (tz[0] == "+") and 1 or -1
1168 sign = (tz[0] == "+") and 1 or -1
1166 hours = int(tz[1:3])
1169 hours = int(tz[1:3])
1167 minutes = int(tz[3:5])
1170 minutes = int(tz[3:5])
1168 return -sign * (hours * 60 + minutes) * 60
1171 return -sign * (hours * 60 + minutes) * 60
1169 if tz == "GMT" or tz == "UTC":
1172 if tz == "GMT" or tz == "UTC":
1170 return 0
1173 return 0
1171 return None
1174 return None
1172
1175
1173 # NOTE: unixtime = localunixtime + offset
1176 # NOTE: unixtime = localunixtime + offset
1174 offset, date = timezone(string), string
1177 offset, date = timezone(string), string
1175 if offset != None:
1178 if offset != None:
1176 date = " ".join(string.split()[:-1])
1179 date = " ".join(string.split()[:-1])
1177
1180
1178 # add missing elements from defaults
1181 # add missing elements from defaults
1179 for part in defaults:
1182 for part in defaults:
1180 found = [True for p in part if ("%"+p) in format]
1183 found = [True for p in part if ("%"+p) in format]
1181 if not found:
1184 if not found:
1182 date += "@" + defaults[part]
1185 date += "@" + defaults[part]
1183 format += "@%" + part[0]
1186 format += "@%" + part[0]
1184
1187
1185 timetuple = time.strptime(date, format)
1188 timetuple = time.strptime(date, format)
1186 localunixtime = int(calendar.timegm(timetuple))
1189 localunixtime = int(calendar.timegm(timetuple))
1187 if offset is None:
1190 if offset is None:
1188 # local timezone
1191 # local timezone
1189 unixtime = int(time.mktime(timetuple))
1192 unixtime = int(time.mktime(timetuple))
1190 offset = unixtime - localunixtime
1193 offset = unixtime - localunixtime
1191 else:
1194 else:
1192 unixtime = localunixtime + offset
1195 unixtime = localunixtime + offset
1193 return unixtime, offset
1196 return unixtime, offset
1194
1197
1195 def parsedate(date, formats=None, defaults=None):
1198 def parsedate(date, formats=None, defaults=None):
1196 """parse a localized date/time string and return a (unixtime, offset) tuple.
1199 """parse a localized date/time string and return a (unixtime, offset) tuple.
1197
1200
1198 The date may be a "unixtime offset" string or in one of the specified
1201 The date may be a "unixtime offset" string or in one of the specified
1199 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1202 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1200 """
1203 """
1201 if not date:
1204 if not date:
1202 return 0, 0
1205 return 0, 0
1203 if isinstance(date, tuple) and len(date) == 2:
1206 if isinstance(date, tuple) and len(date) == 2:
1204 return date
1207 return date
1205 if not formats:
1208 if not formats:
1206 formats = defaultdateformats
1209 formats = defaultdateformats
1207 date = date.strip()
1210 date = date.strip()
1208 try:
1211 try:
1209 when, offset = map(int, date.split(' '))
1212 when, offset = map(int, date.split(' '))
1210 except ValueError:
1213 except ValueError:
1211 # fill out defaults
1214 # fill out defaults
1212 if not defaults:
1215 if not defaults:
1213 defaults = {}
1216 defaults = {}
1214 now = makedate()
1217 now = makedate()
1215 for part in "d mb yY HI M S".split():
1218 for part in "d mb yY HI M S".split():
1216 if part not in defaults:
1219 if part not in defaults:
1217 if part[0] in "HMS":
1220 if part[0] in "HMS":
1218 defaults[part] = "00"
1221 defaults[part] = "00"
1219 else:
1222 else:
1220 defaults[part] = datestr(now, "%" + part[0])
1223 defaults[part] = datestr(now, "%" + part[0])
1221
1224
1222 for format in formats:
1225 for format in formats:
1223 try:
1226 try:
1224 when, offset = strdate(date, format, defaults)
1227 when, offset = strdate(date, format, defaults)
1225 except (ValueError, OverflowError):
1228 except (ValueError, OverflowError):
1226 pass
1229 pass
1227 else:
1230 else:
1228 break
1231 break
1229 else:
1232 else:
1230 raise Abort(_('invalid date: %r') % date)
1233 raise Abort(_('invalid date: %r') % date)
1231 # validate explicit (probably user-specified) date and
1234 # validate explicit (probably user-specified) date and
1232 # time zone offset. values must fit in signed 32 bits for
1235 # time zone offset. values must fit in signed 32 bits for
1233 # current 32-bit linux runtimes. timezones go from UTC-12
1236 # current 32-bit linux runtimes. timezones go from UTC-12
1234 # to UTC+14
1237 # to UTC+14
1235 if abs(when) > 0x7fffffff:
1238 if abs(when) > 0x7fffffff:
1236 raise Abort(_('date exceeds 32 bits: %d') % when)
1239 raise Abort(_('date exceeds 32 bits: %d') % when)
1237 if offset < -50400 or offset > 43200:
1240 if offset < -50400 or offset > 43200:
1238 raise Abort(_('impossible time zone offset: %d') % offset)
1241 raise Abort(_('impossible time zone offset: %d') % offset)
1239 return when, offset
1242 return when, offset
1240
1243
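A sketch of the input forms parsedate accepts; the formatted-string case depends on defaultdateformats defined earlier in this module:

    parsedate('1234567890 0')     # 'unixtime offset' form -> (1234567890, 0)
    parsedate((1234567890, 0))    # (unixtime, offset) tuples pass straight through
    parsedate('')                 # empty or missing dates -> (0, 0)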
1241 def matchdate(date):
1244 def matchdate(date):
1242 """Return a function that matches a given date match specifier
1245 """Return a function that matches a given date match specifier
1243
1246
1244 Formats include:
1247 Formats include:
1245
1248
1246 '{date}' match a given date to the accuracy provided
1249 '{date}' match a given date to the accuracy provided
1247
1250
1248 '<{date}' on or before a given date
1251 '<{date}' on or before a given date
1249
1252
1250 '>{date}' on or after a given date
1253 '>{date}' on or after a given date
1251
1254
1252 """
1255 """
1253
1256
1254 def lower(date):
1257 def lower(date):
1255 d = dict(mb="1", d="1")
1258 d = dict(mb="1", d="1")
1256 return parsedate(date, extendeddateformats, d)[0]
1259 return parsedate(date, extendeddateformats, d)[0]
1257
1260
1258 def upper(date):
1261 def upper(date):
1259 d = dict(mb="12", HI="23", M="59", S="59")
1262 d = dict(mb="12", HI="23", M="59", S="59")
1260 for days in "31 30 29".split():
1263 for days in "31 30 29".split():
1261 try:
1264 try:
1262 d["d"] = days
1265 d["d"] = days
1263 return parsedate(date, extendeddateformats, d)[0]
1266 return parsedate(date, extendeddateformats, d)[0]
1264 except:
1267 except:
1265 pass
1268 pass
1266 d["d"] = "28"
1269 d["d"] = "28"
1267 return parsedate(date, extendeddateformats, d)[0]
1270 return parsedate(date, extendeddateformats, d)[0]
1268
1271
1269 date = date.strip()
1272 date = date.strip()
1270 if date[0] == "<":
1273 if date[0] == "<":
1271 when = upper(date[1:])
1274 when = upper(date[1:])
1272 return lambda x: x <= when
1275 return lambda x: x <= when
1273 elif date[0] == ">":
1276 elif date[0] == ">":
1274 when = lower(date[1:])
1277 when = lower(date[1:])
1275 return lambda x: x >= when
1278 return lambda x: x >= when
1276 elif date[0] == "-":
1279 elif date[0] == "-":
1277 try:
1280 try:
1278 days = int(date[1:])
1281 days = int(date[1:])
1279 except ValueError:
1282 except ValueError:
1280 raise Abort(_("invalid day spec: %s") % date[1:])
1283 raise Abort(_("invalid day spec: %s") % date[1:])
1281 when = makedate()[0] - days * 3600 * 24
1284 when = makedate()[0] - days * 3600 * 24
1282 return lambda x: x >= when
1285 return lambda x: x >= when
1283 elif " to " in date:
1286 elif " to " in date:
1284 a, b = date.split(" to ")
1287 a, b = date.split(" to ")
1285 start, stop = lower(a), upper(b)
1288 start, stop = lower(a), upper(b)
1286 return lambda x: x >= start and x <= stop
1289 return lambda x: x >= start and x <= stop
1287 else:
1290 else:
1288 start, stop = lower(date), upper(date)
1291 start, stop = lower(date), upper(date)
1289 return lambda x: x >= start and x <= stop
1292 return lambda x: x >= start and x <= stop
1290
1293
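Sketch: matchdate returns a predicate over unixtimes; '-N' means within the last N days:

    recent = matchdate('-30')     # on or after 30 days ago
    recent(makedate()[0])         # True: 'now' falls inside the window
    recent(0)                     # False: the epoch does not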
1291 def shortuser(user):
1294 def shortuser(user):
1292 """Return a short representation of a user name or email address."""
1295 """Return a short representation of a user name or email address."""
1293 f = user.find('@')
1296 f = user.find('@')
1294 if f >= 0:
1297 if f >= 0:
1295 user = user[:f]
1298 user = user[:f]
1296 f = user.find('<')
1299 f = user.find('<')
1297 if f >= 0:
1300 if f >= 0:
1298 user = user[f+1:]
1301 user = user[f+1:]
1299 f = user.find(' ')
1302 f = user.find(' ')
1300 if f >= 0:
1303 if f >= 0:
1301 user = user[:f]
1304 user = user[:f]
1302 f = user.find('.')
1305 f = user.find('.')
1303 if f >= 0:
1306 if f >= 0:
1304 user = user[:f]
1307 user = user[:f]
1305 return user
1308 return user
1306
1309
1307 def email(author):
1310 def email(author):
1308 '''get email of author.'''
1311 '''get email of author.'''
1309 r = author.find('>')
1312 r = author.find('>')
1310 if r == -1: r = None
1313 if r == -1: r = None
1311 return author[author.find('<')+1:r]
1314 return author[author.find('<')+1:r]
1312
1315
1313 def ellipsis(text, maxlength=400):
1316 def ellipsis(text, maxlength=400):
1314 """Trim string to at most maxlength (default: 400) characters."""
1317 """Trim string to at most maxlength (default: 400) characters."""
1315 if len(text) <= maxlength:
1318 if len(text) <= maxlength:
1316 return text
1319 return text
1317 else:
1320 else:
1318 return "%s..." % (text[:maxlength-3])
1321 return "%s..." % (text[:maxlength-3])
1319
1322
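For illustration (the author string is made up):

    author = 'Joe User <joe.user@example.com>'
    email(author)                         # 'joe.user@example.com'
    shortuser(author)                     # 'joe'
    ellipsis('abcdefghij', maxlength=8)   # 'abcde...'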
1320 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1323 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1321 '''yield every hg repository under path, recursively.'''
1324 '''yield every hg repository under path, recursively.'''
1322 def errhandler(err):
1325 def errhandler(err):
1323 if err.filename == path:
1326 if err.filename == path:
1324 raise err
1327 raise err
1325 if followsym and hasattr(os.path, 'samestat'):
1328 if followsym and hasattr(os.path, 'samestat'):
1326 def _add_dir_if_not_there(dirlst, dirname):
1329 def _add_dir_if_not_there(dirlst, dirname):
1327 match = False
1330 match = False
1328 samestat = os.path.samestat
1331 samestat = os.path.samestat
1329 dirstat = os.stat(dirname)
1332 dirstat = os.stat(dirname)
1330 for lstdirstat in dirlst:
1333 for lstdirstat in dirlst:
1331 if samestat(dirstat, lstdirstat):
1334 if samestat(dirstat, lstdirstat):
1332 match = True
1335 match = True
1333 break
1336 break
1334 if not match:
1337 if not match:
1335 dirlst.append(dirstat)
1338 dirlst.append(dirstat)
1336 return not match
1339 return not match
1337 else:
1340 else:
1338 followsym = False
1341 followsym = False
1339
1342
1340 if (seen_dirs is None) and followsym:
1343 if (seen_dirs is None) and followsym:
1341 seen_dirs = []
1344 seen_dirs = []
1342 _add_dir_if_not_there(seen_dirs, path)
1345 _add_dir_if_not_there(seen_dirs, path)
1343 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1346 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1344 if '.hg' in dirs:
1347 if '.hg' in dirs:
1345 yield root # found a repository
1348 yield root # found a repository
1346 qroot = os.path.join(root, '.hg', 'patches')
1349 qroot = os.path.join(root, '.hg', 'patches')
1347 if os.path.isdir(os.path.join(qroot, '.hg')):
1350 if os.path.isdir(os.path.join(qroot, '.hg')):
1348 yield qroot # we have a patch queue repo here
1351 yield qroot # we have a patch queue repo here
1349 if recurse:
1352 if recurse:
1350 # avoid recursing inside the .hg directory
1353 # avoid recursing inside the .hg directory
1351 dirs.remove('.hg')
1354 dirs.remove('.hg')
1352 else:
1355 else:
1353 dirs[:] = [] # don't descend further
1356 dirs[:] = [] # don't descend further
1354 elif followsym:
1357 elif followsym:
1355 newdirs = []
1358 newdirs = []
1356 for d in dirs:
1359 for d in dirs:
1357 fname = os.path.join(root, d)
1360 fname = os.path.join(root, d)
1358 if _add_dir_if_not_there(seen_dirs, fname):
1361 if _add_dir_if_not_there(seen_dirs, fname):
1359 if os.path.islink(fname):
1362 if os.path.islink(fname):
1360 for hgname in walkrepos(fname, True, seen_dirs):
1363 for hgname in walkrepos(fname, True, seen_dirs):
1361 yield hgname
1364 yield hgname
1362 else:
1365 else:
1363 newdirs.append(d)
1366 newdirs.append(d)
1364 dirs[:] = newdirs
1367 dirs[:] = newdirs
1365
1368
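Sketch (hypothetical tree): walkrepos yields every directory that contains a .hg subdirectory, including mq patch-queue repositories nested under .hg/patches:

    for repo in walkrepos('/srv/hg', followsym=True):
        print repo    # e.g. '/srv/hg/project', '/srv/hg/project/.hg/patches'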
1366 _rcpath = None
1369 _rcpath = None
1367
1370
1368 def os_rcpath():
1371 def os_rcpath():
1369 '''return default os-specific hgrc search path'''
1372 '''return default os-specific hgrc search path'''
1370 path = system_rcpath()
1373 path = system_rcpath()
1371 path.extend(user_rcpath())
1374 path.extend(user_rcpath())
1372 path = [os.path.normpath(f) for f in path]
1375 path = [os.path.normpath(f) for f in path]
1373 return path
1376 return path
1374
1377
1375 def rcpath():
1378 def rcpath():
1376 '''return hgrc search path. if env var HGRCPATH is set, use it.
1379 '''return hgrc search path. if env var HGRCPATH is set, use it.
1377 for each item in path, if directory, use files ending in .rc,
1380 for each item in path, if directory, use files ending in .rc,
1378 else use item.
1381 else use item.
1379 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1382 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1380 if no HGRCPATH, use default os-specific path.'''
1383 if no HGRCPATH, use default os-specific path.'''
1381 global _rcpath
1384 global _rcpath
1382 if _rcpath is None:
1385 if _rcpath is None:
1383 if 'HGRCPATH' in os.environ:
1386 if 'HGRCPATH' in os.environ:
1384 _rcpath = []
1387 _rcpath = []
1385 for p in os.environ['HGRCPATH'].split(os.pathsep):
1388 for p in os.environ['HGRCPATH'].split(os.pathsep):
1386 if not p: continue
1389 if not p: continue
1387 if os.path.isdir(p):
1390 if os.path.isdir(p):
1388 for f, kind in osutil.listdir(p):
1391 for f, kind in osutil.listdir(p):
1389 if f.endswith('.rc'):
1392 if f.endswith('.rc'):
1390 _rcpath.append(os.path.join(p, f))
1393 _rcpath.append(os.path.join(p, f))
1391 else:
1394 else:
1392 _rcpath.append(p)
1395 _rcpath.append(p)
1393 else:
1396 else:
1394 _rcpath = os_rcpath()
1397 _rcpath = os_rcpath()
1395 return _rcpath
1398 return _rcpath
1396
1399
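For illustration (hypothetical values): with HGRCPATH set, only its entries are consulted; directory entries contribute their *.rc files, plain files are used as-is. The result is cached in _rcpath after the first call:

    os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/hgrc.d', '/home/user/.hgrc'])
    rcpath()    # *.rc files under hgrc.d (if it is a directory), then /home/user/.hgrc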
1397 def bytecount(nbytes):
1400 def bytecount(nbytes):
1398 '''return byte count formatted as readable string, with units'''
1401 '''return byte count formatted as readable string, with units'''
1399
1402
1400 units = (
1403 units = (
1401 (100, 1<<30, _('%.0f GB')),
1404 (100, 1<<30, _('%.0f GB')),
1402 (10, 1<<30, _('%.1f GB')),
1405 (10, 1<<30, _('%.1f GB')),
1403 (1, 1<<30, _('%.2f GB')),
1406 (1, 1<<30, _('%.2f GB')),
1404 (100, 1<<20, _('%.0f MB')),
1407 (100, 1<<20, _('%.0f MB')),
1405 (10, 1<<20, _('%.1f MB')),
1408 (10, 1<<20, _('%.1f MB')),
1406 (1, 1<<20, _('%.2f MB')),
1409 (1, 1<<20, _('%.2f MB')),
1407 (100, 1<<10, _('%.0f KB')),
1410 (100, 1<<10, _('%.0f KB')),
1408 (10, 1<<10, _('%.1f KB')),
1411 (10, 1<<10, _('%.1f KB')),
1409 (1, 1<<10, _('%.2f KB')),
1412 (1, 1<<10, _('%.2f KB')),
1410 (1, 1, _('%.0f bytes')),
1413 (1, 1, _('%.0f bytes')),
1411 )
1414 )
1412
1415
1413 for multiplier, divisor, format in units:
1416 for multiplier, divisor, format in units:
1414 if nbytes >= divisor * multiplier:
1417 if nbytes >= divisor * multiplier:
1415 return format % (nbytes / float(divisor))
1418 return format % (nbytes / float(divisor))
1416 return units[-1][2] % nbytes
1419 return units[-1][2] % nbytes
1417
1420
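For illustration, bytecount picks the first unit whose threshold the value reaches, so the displayed precision shrinks as the number grows:

    bytecount(0)           # '0 bytes'
    bytecount(1024)        # '1.00 KB'
    bytecount(12345678)    # '11.8 MB'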
1418 def drop_scheme(scheme, path):
1421 def drop_scheme(scheme, path):
1419 sc = scheme + ':'
1422 sc = scheme + ':'
1420 if path.startswith(sc):
1423 if path.startswith(sc):
1421 path = path[len(sc):]
1424 path = path[len(sc):]
1422 if path.startswith('//'):
1425 if path.startswith('//'):
1423 path = path[2:]
1426 path = path[2:]
1424 return path
1427 return path
1425
1428
1426 def uirepr(s):
1429 def uirepr(s):
1427 # Avoid double backslash in Windows path repr()
1430 # Avoid double backslash in Windows path repr()
1428 return repr(s).replace('\\\\', '\\')
1431 return repr(s).replace('\\\\', '\\')
1429
1432
1430 def termwidth():
1433 def termwidth():
1431 if 'COLUMNS' in os.environ:
1434 if 'COLUMNS' in os.environ:
1432 try:
1435 try:
1433 return int(os.environ['COLUMNS'])
1436 return int(os.environ['COLUMNS'])
1434 except ValueError:
1437 except ValueError:
1435 pass
1438 pass
1436 try:
1439 try:
1437 import termios, array, fcntl
1440 import termios, array, fcntl
1438 for dev in (sys.stdout, sys.stdin):
1441 for dev in (sys.stdout, sys.stdin):
1439 try:
1442 try:
1440 fd = dev.fileno()
1443 fd = dev.fileno()
1441 if not os.isatty(fd):
1444 if not os.isatty(fd):
1442 continue
1445 continue
1443 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1446 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1444 return array.array('h', arri)[1]
1447 return array.array('h', arri)[1]
1445 except ValueError:
1448 except ValueError:
1446 pass
1449 pass
1447 except ImportError:
1450 except ImportError:
1448 pass
1451 pass
1449 return 80
1452 return 80
1450
1453
1451 def iterlines(iterator):
1454 def iterlines(iterator):
1452 for chunk in iterator:
1455 for chunk in iterator:
1453 for line in chunk.splitlines():
1456 for line in chunk.splitlines():
1454 yield line
1457 yield line