Merge with -stable
Matt Mackall
r9552:f0417b6f merge default
# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from i18n import _
import error, osutil, encoding
import cStringIO, errno, re, shutil, sys, tempfile, traceback
import os, stat, time, calendar, textwrap
import imp

# Python compatibility

def sha1(s):
    return _fastsha1(s)

def _fastsha1(s):
    # This function will import sha1 from hashlib or sha (whichever is
    # available) and overwrite itself with it on the first call.
    # Subsequent calls will go directly to the imported function.
    try:
        from hashlib import sha1 as _sha1
    except ImportError:
        from sha import sha as _sha1
    global _fastsha1, sha1
    _fastsha1 = sha1 = _sha1
    return _sha1(s)

import subprocess
closefds = os.name == 'posix'
def popen2(cmd):
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    return p.stdin, p.stdout
def popen3(cmd):
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    return p.stdin, p.stdout, p.stderr

def version():
    """Return version information if available."""
    try:
        import __version__
        return __version__.version
    except ImportError:
        return 'unknown'

# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )

def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keyword args
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f

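# Illustrative usage sketch (not part of the original module): cachefunc
# memoizes on the positional arguments, so repeated calls with the same
# argument reuse the stored result.  The helper name below is hypothetical.
def _cachefunc_example():
    calls = []
    def square(x):
        calls.append(x)
        return x * x
    cached = cachefunc(square)
    cached(3); cached(3)       # the second call is served from the cache
    return len(calls) == 1     # True: the wrapped function ran only once
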
def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = []
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.pop(0)]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.pop(0)]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f

class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        setattr(obj, self.name, result)
        return result

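# Illustrative usage sketch (not part of the original module): propertycache
# computes an attribute once, then stores the result on the instance so the
# descriptor is bypassed on later accesses.  The class below is hypothetical.
class _propertycache_example(object):
    @propertycache
    def expensive(self):
        # runs only on the first access; afterwards self.expensive is a
        # plain instance attribute holding 42
        return 42
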
def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout

def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass

filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)

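# Illustrative usage sketch (not part of the original module): filter()
# dispatches on the 'tempfile:' and 'pipe:' prefixes from filtertable and
# otherwise falls back to pipefilter.  The helper name is hypothetical and
# the example commands assume a POSIX-like shell.
def _filter_example():
    upper = filter("hello\n", "pipe: tr a-z A-Z")             # via a pipe
    same = filter("hello\n", "tempfile: cp INFILE OUTFILE")   # via temp files
    return upper, same
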
def binary(s):
    """return true if a string is binary data"""
    return bool(s and '\0' in s)

def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield ''.join(buf)

Abort = error.Abort

def always(fn): return True
def never(fn): return False

def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1: return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'

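# Illustrative usage sketch (not part of the original module): pathto()
# computes how to get from one repository-relative location to another.
# The helper name and the example paths are hypothetical.
def _pathto_example():
    # from the directory 'a/b' to 'c/d', both relative to the root, i.e.
    # '../../c/d' (with '/' replaced by os.sep on the current platform)
    return pathto('/repo', os.path.join('a', 'b'), 'c/d')
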
def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    if endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    audit_path = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows).  For each
        # `name', compare dev/inode numbers.  If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)

_hgexecutable = None

def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (hasattr(sys, "frozen") or # new py2exe
            hasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        if hg:
            set_hgexecutable(hg)
        elif main_is_frozen():
            set_hgexecutable(sys.executable)
        else:
            set_hgexecutable(find_exe('hg') or 'hg')
    return _hgexecutable

def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path

def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status.  if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    if os.name == 'nt':
        cmd = '"%s"' % cmd
    env = dict(os.environ)
    env.update((k, py2shell(v)) for k, v in environ.iteritems())
    env['HG'] = hgexecutable()
    rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                         env=env, cwd=cwd)
    if sys.platform == 'OpenVMS' and rc & 1:
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explain_exit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc

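# Illustrative usage sketch (not part of the original module): system() runs
# a shell command with $HG pointing at the hg executable and returns the exit
# status; with onerr set, a non-zero status raises instead.  The helper name
# and the DEBUG environment variable are hypothetical.
def _system_example():
    # extra environment values are converted with py2shell (True -> '1')
    return system('exit 0', environ={'DEBUG': True}, onerr=Abort,
                  errprefix='example command')
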
def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check

# os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    except:
        return False
    return True

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass

def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copystat(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))

def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            copyfiles(srcname, dstname, hardlink)
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)

class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0]
            or parts[0].lower() in ('.hg', '.hg.', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        if '.hg' in path.lower():
            lparts = [p.lower() for p in parts]
            for p in '.hg', '.hg.':
                if p in lparts[1:]:
                    pos = lparts.index(p)
                    base = os.path.join(*parts[:pos])
                    raise Abort(_('path %r is inside repo %r') % (path, base))
        def check(prefix):
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored so that patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        parts.pop()
        prefixes = []
        while parts:
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)

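# Illustrative usage sketch (not part of the original module): a path_auditor
# bound to a repository root accepts ordinary working-directory paths and
# aborts on paths that escape the root or reach into '.hg'.  The helper name
# is hypothetical.
def _path_auditor_example(root):
    audit = path_auditor(root)
    audit(os.path.join('dir', 'file.txt'))        # fine, and cached afterwards
    try:
        audit(os.path.join(os.pardir, 'escape'))  # contains '..' -> Abort
    except Abort:
        pass
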
def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))

def lookup_reg(key, name=None, scope=None):
    return None

if os.name == 'nt':
    from windows import *
else:
    from posix import *

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError, why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    return posixfile(pathname).read()

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            return False
        return True
    except:
        return True

_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).
    '''
    # If name is absolute, make it relative
    if name.lower().startswith(root.lower()):
        l = len(root)
        if name[l] == os.sep or name[l] == os.altsep:
            l = l + 1
        name = name[l:]

    if not os.path.exists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    seps = seps.replace('\\', '\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normcase(os.path.normpath(root))
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        lpart = part.lower()
        lenp = len(part)
        for n in contents:
            if lenp == len(n) and n.lower() == lpart:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, lpart)

    return ''.join(result)

def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # across a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)

def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        return False

def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    return os.name == 'nt'

def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because it is
    intended as a simple alternative to "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)

def gui():
    '''Are we running in a GUI?'''
    return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want.  If the original file already exists, just copy
    # its mode.  Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        try: os.unlink(temp)
        except: pass
        raise
    return temp

class atomictempfile(object):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file.  When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        self.__name = name
        self._fp = None
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        self._fp = posixfile(self.temp, mode)

    def __getattr__(self, name):
        return getattr(self._fp, name)

    def rename(self):
        if not self._fp.closed:
            self._fp.close()
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self._fp:
            return
        if not self._fp.closed:
            try:
                os.unlink(self.temp)
            except: pass
            self._fp.close()

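# Illustrative usage sketch (not part of the original module): writes go to a
# temporary copy and only become visible at the destination when rename() is
# called; dropping the object without rename() discards the temporary file.
# The helper name and the path argument are hypothetical.
def _atomictempfile_example(path):
    f = atomictempfile(path, 'w', createmode=None)
    f.write('all or nothing\n')
    f.rename()    # atomically replaces (or creates) the file at 'path'
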
def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    @propertycache
    def _can_symlink(self):
        return checklink(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
                d = os.path.dirname(f)
                if not os.path.isdir(d):
                    makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

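# Illustrative usage sketch (not part of the original module): an opener
# rooted at a directory audits relative paths, creates missing parent
# directories on write, and can hand back an atomictempfile.  The helper
# name and the example paths are hypothetical.
def _opener_example(base):
    op = opener(base)              # audit=True: paths like '../x' are refused
    f = op('store/data.txt', 'w')  # parent directory is created on demand
    f.write('payload\n')
    f.close()
    return op('store/data.txt').read()
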
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s

def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s

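# Illustrative usage sketch (not part of the original module): filechunkiter
# turns a file into bounded-size chunks and chunkbuffer turns a chunk
# iterator back into a read(n)-style object.  The helper name is hypothetical.
def _chunk_example(fp):
    chunks = filechunkiter(fp, size=8192)   # yields at most 8192 bytes each
    buffered = chunkbuffer(chunks)
    header = buffered.read(16)              # may be shorter if fp runs dry
    return header
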
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. if timezone is false, do not
    append time zone to string."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

969 def parsedate(date, formats=None, defaults=None):
969 def parsedate(date, formats=None, defaults=None):
970 """parse a localized date/time string and return a (unixtime, offset) tuple.
970 """parse a localized date/time string and return a (unixtime, offset) tuple.
971
971
972 The date may be a "unixtime offset" string or in one of the specified
972 The date may be a "unixtime offset" string or in one of the specified
973 formats. If the date already is a (unixtime, offset) tuple, it is returned.
973 formats. If the date already is a (unixtime, offset) tuple, it is returned.
974 """
974 """
975 if not date:
975 if not date:
976 return 0, 0
976 return 0, 0
977 if isinstance(date, tuple) and len(date) == 2:
977 if isinstance(date, tuple) and len(date) == 2:
978 return date
978 return date
979 if not formats:
979 if not formats:
980 formats = defaultdateformats
980 formats = defaultdateformats
981 date = date.strip()
981 date = date.strip()
982 try:
982 try:
983 when, offset = map(int, date.split(' '))
983 when, offset = map(int, date.split(' '))
984 except ValueError:
984 except ValueError:
985 # fill out defaults
985 # fill out defaults
986 if not defaults:
986 if not defaults:
987 defaults = {}
987 defaults = {}
988 now = makedate()
988 now = makedate()
989 for part in "d mb yY HI M S".split():
989 for part in "d mb yY HI M S".split():
990 if part not in defaults:
990 if part not in defaults:
991 if part[0] in "HMS":
991 if part[0] in "HMS":
992 defaults[part] = "00"
992 defaults[part] = "00"
993 else:
993 else:
994 defaults[part] = datestr(now, "%" + part[0])
994 defaults[part] = datestr(now, "%" + part[0])
995
995
996 for format in formats:
996 for format in formats:
997 try:
997 try:
998 when, offset = strdate(date, format, defaults)
998 when, offset = strdate(date, format, defaults)
999 except (ValueError, OverflowError):
999 except (ValueError, OverflowError):
1000 pass
1000 pass
1001 else:
1001 else:
1002 break
1002 break
1003 else:
1003 else:
1004 raise Abort(_('invalid date: %r') % date)
1004 raise Abort(_('invalid date: %r') % date)
1005 # validate explicit (probably user-specified) date and
1005 # validate explicit (probably user-specified) date and
1006 # time zone offset. values must fit in signed 32 bits for
1006 # time zone offset. values must fit in signed 32 bits for
1007 # current 32-bit linux runtimes. timezones go from UTC-12
1007 # current 32-bit linux runtimes. timezones go from UTC-12
1008 # to UTC+14
1008 # to UTC+14
1009 if abs(when) > 0x7fffffff:
1009 if abs(when) > 0x7fffffff:
1010 raise Abort(_('date exceeds 32 bits: %d') % when)
1010 raise Abort(_('date exceeds 32 bits: %d') % when)
1011 if offset < -50400 or offset > 43200:
1011 if offset < -50400 or offset > 43200:
1012 raise Abort(_('impossible time zone offset: %d') % offset)
1012 raise Abort(_('impossible time zone offset: %d') % offset)
1013 return when, offset
1013 return when, offset
1014
1014
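# Illustrative sketch, not part of the original file: parsedate() accepts a raw
# "unixtime offset" pair, an already-parsed (unixtime, offset) tuple, or any
# string matching one of the configured formats (defaultdateformats by
# default).  Assuming mercurial.util is importable:
#
#   >>> from mercurial import util
#   >>> util.parsedate('1234567890 -3600')    # already "unixtime offset"
#   (1234567890, -3600)
#   >>> util.parsedate((1234567890, -3600))   # tuples pass straight through
#   (1234567890, -3600)
#   >>> util.parsedate('')                    # empty date means epoch, UTC
#   (0, 0)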
1015 def matchdate(date):
1015 def matchdate(date):
1016 """Return a function that matches a given date match specifier
1016 """Return a function that matches a given date match specifier
1017
1017
1018 Formats include:
1018 Formats include:
1019
1019
1020 '{date}' match a given date to the accuracy provided
1020 '{date}' match a given date to the accuracy provided
1021
1021
1022 '<{date}' on or before a given date
1022 '<{date}' on or before a given date
1023
1023
1024 '>{date}' on or after a given date
1024 '>{date}' on or after a given date
1025
1025
1026 """
1026 """
1027
1027
1028 def lower(date):
1028 def lower(date):
1029 d = dict(mb="1", d="1")
1029 d = dict(mb="1", d="1")
1030 return parsedate(date, extendeddateformats, d)[0]
1030 return parsedate(date, extendeddateformats, d)[0]
1031
1031
1032 def upper(date):
1032 def upper(date):
1033 d = dict(mb="12", HI="23", M="59", S="59")
1033 d = dict(mb="12", HI="23", M="59", S="59")
1034 for days in "31 30 29".split():
1034 for days in "31 30 29".split():
1035 try:
1035 try:
1036 d["d"] = days
1036 d["d"] = days
1037 return parsedate(date, extendeddateformats, d)[0]
1037 return parsedate(date, extendeddateformats, d)[0]
1038 except Abort: # day is invalid for this month; retry with fewer days
1038 except Abort: # day is invalid for this month; retry with fewer days
1039 pass
1039 pass
1040 d["d"] = "28"
1040 d["d"] = "28"
1041 return parsedate(date, extendeddateformats, d)[0]
1041 return parsedate(date, extendeddateformats, d)[0]
1042
1042
1043 date = date.strip()
1043 date = date.strip()
1044 if date[0] == "<":
1044 if date[0] == "<":
1045 when = upper(date[1:])
1045 when = upper(date[1:])
1046 return lambda x: x <= when
1046 return lambda x: x <= when
1047 elif date[0] == ">":
1047 elif date[0] == ">":
1048 when = lower(date[1:])
1048 when = lower(date[1:])
1049 return lambda x: x >= when
1049 return lambda x: x >= when
1050 elif date[0] == "-":
1050 elif date[0] == "-":
1051 try:
1051 try:
1052 days = int(date[1:])
1052 days = int(date[1:])
1053 except ValueError:
1053 except ValueError:
1054 raise Abort(_("invalid day spec: %s") % date[1:])
1054 raise Abort(_("invalid day spec: %s") % date[1:])
1055 when = makedate()[0] - days * 3600 * 24
1055 when = makedate()[0] - days * 3600 * 24
1056 return lambda x: x >= when
1056 return lambda x: x >= when
1057 elif " to " in date:
1057 elif " to " in date:
1058 a, b = date.split(" to ")
1058 a, b = date.split(" to ")
1059 start, stop = lower(a), upper(b)
1059 start, stop = lower(a), upper(b)
1060 return lambda x: x >= start and x <= stop
1060 return lambda x: x >= start and x <= stop
1061 else:
1061 else:
1062 start, stop = lower(date), upper(date)
1062 start, stop = lower(date), upper(date)
1063 return lambda x: x >= start and x <= stop
1063 return lambda x: x >= start and x <= stop
1064
1064
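# Illustrative sketch, not part of the original file: matchdate() turns a date
# specifier into a predicate over unixtimes, so each revision date can be
# tested without re-parsing the spec.  Besides the forms in the docstring it
# also understands '-{days}' (within that many days of now) and
# '{date} to {date}' (an inclusive range).  Assuming mercurial.util is
# importable and '%Y-%m-%d' is among the accepted formats:
#
#   >>> from mercurial import util
#   >>> after = util.matchdate('>2006-02-01')
#   >>> after(util.parsedate('2006-05-01')[0])
#   True
#   >>> after(util.parsedate('2005-12-31')[0])
#   False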
1065 def shortuser(user):
1065 def shortuser(user):
1066 """Return a short representation of a user name or email address."""
1066 """Return a short representation of a user name or email address."""
1067 f = user.find('@')
1067 f = user.find('@')
1068 if f >= 0:
1068 if f >= 0:
1069 user = user[:f]
1069 user = user[:f]
1070 f = user.find('<')
1070 f = user.find('<')
1071 if f >= 0:
1071 if f >= 0:
1072 user = user[f+1:]
1072 user = user[f+1:]
1073 f = user.find(' ')
1073 f = user.find(' ')
1074 if f >= 0:
1074 if f >= 0:
1075 user = user[:f]
1075 user = user[:f]
1076 f = user.find('.')
1076 f = user.find('.')
1077 if f >= 0:
1077 if f >= 0:
1078 user = user[:f]
1078 user = user[:f]
1079 return user
1079 return user
1080
1080
1081 def email(author):
1081 def email(author):
1082 '''get email of author.'''
1082 '''get email of author.'''
1083 r = author.find('>')
1083 r = author.find('>')
1084 if r == -1: r = None
1084 if r == -1: r = None
1085 return author[author.find('<')+1:r]
1085 return author[author.find('<')+1:r]
1086
1086
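# Illustrative sketch, not part of the original file: shortuser() and email()
# pull a compact login-like name and a bare address out of the usual
# "Name <addr>" author string.  Assuming mercurial.util is importable:
#
#   >>> from mercurial import util
#   >>> util.shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> util.email('John Doe <john.doe@example.com>')
#   'john.doe@example.com'
#   >>> util.email('john.doe@example.com')    # no angle brackets: unchanged
#   'john.doe@example.com'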
1087 def ellipsis(text, maxlength=400):
1087 def ellipsis(text, maxlength=400):
1088 """Trim string to at most maxlength (default: 400) characters."""
1088 """Trim string to at most maxlength (default: 400) characters."""
1089 if len(text) <= maxlength:
1089 if len(text) <= maxlength:
1090 return text
1090 return text
1091 else:
1091 else:
1092 return "%s..." % (text[:maxlength-3])
1092 return "%s..." % (text[:maxlength-3])
1093
1093
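# Illustrative sketch, not part of the original file: a trimmed result is still
# exactly maxlength characters, since three of them are spent on the "...":
#
#   >>> from mercurial import util
#   >>> util.ellipsis('abcdefghij', 8)
#   'abcde...'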
1094 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1094 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1095 '''yield every hg repository under path (including mq patch queue repos); descend into repositories already found only if recurse is True.'''
1095 '''yield every hg repository under path (including mq patch queue repos); descend into repositories already found only if recurse is True.'''
1096 def errhandler(err):
1096 def errhandler(err):
1097 if err.filename == path:
1097 if err.filename == path:
1098 raise err
1098 raise err
1099 if followsym and hasattr(os.path, 'samestat'):
1099 if followsym and hasattr(os.path, 'samestat'):
1100 def _add_dir_if_not_there(dirlst, dirname):
1100 def _add_dir_if_not_there(dirlst, dirname):
1101 match = False
1101 match = False
1102 samestat = os.path.samestat
1102 samestat = os.path.samestat
1103 dirstat = os.stat(dirname)
1103 dirstat = os.stat(dirname)
1104 for lstdirstat in dirlst:
1104 for lstdirstat in dirlst:
1105 if samestat(dirstat, lstdirstat):
1105 if samestat(dirstat, lstdirstat):
1106 match = True
1106 match = True
1107 break
1107 break
1108 if not match:
1108 if not match:
1109 dirlst.append(dirstat)
1109 dirlst.append(dirstat)
1110 return not match
1110 return not match
1111 else:
1111 else:
1112 followsym = False
1112 followsym = False
1113
1113
1114 if (seen_dirs is None) and followsym:
1114 if (seen_dirs is None) and followsym:
1115 seen_dirs = []
1115 seen_dirs = []
1116 _add_dir_if_not_there(seen_dirs, path)
1116 _add_dir_if_not_there(seen_dirs, path)
1117 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1117 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1118 if '.hg' in dirs:
1118 if '.hg' in dirs:
1119 yield root # found a repository
1119 yield root # found a repository
1120 qroot = os.path.join(root, '.hg', 'patches')
1120 qroot = os.path.join(root, '.hg', 'patches')
1121 if os.path.isdir(os.path.join(qroot, '.hg')):
1121 if os.path.isdir(os.path.join(qroot, '.hg')):
1122 yield qroot # we have a patch queue repo here
1122 yield qroot # we have a patch queue repo here
1123 if recurse:
1123 if recurse:
1124 # avoid recursing inside the .hg directory
1124 # avoid recursing inside the .hg directory
1125 dirs.remove('.hg')
1125 dirs.remove('.hg')
1126 else:
1126 else:
1127 dirs[:] = [] # don't descend further
1127 dirs[:] = [] # don't descend further
1128 elif followsym:
1128 elif followsym:
1129 newdirs = []
1129 newdirs = []
1130 for d in dirs:
1130 for d in dirs:
1131 fname = os.path.join(root, d)
1131 fname = os.path.join(root, d)
1132 if _add_dir_if_not_there(seen_dirs, fname):
1132 if _add_dir_if_not_there(seen_dirs, fname):
1133 if os.path.islink(fname):
1133 if os.path.islink(fname):
1134 for hgname in walkrepos(fname, True, seen_dirs):
1134 for hgname in walkrepos(fname, True, seen_dirs):
1135 yield hgname
1135 yield hgname
1136 else:
1136 else:
1137 newdirs.append(d)
1137 newdirs.append(d)
1138 dirs[:] = newdirs
1138 dirs[:] = newdirs
1139
1139
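# Illustrative sketch, not part of the original file: walkrepos() yields each
# repository root found under a directory, plus any mq patch queue repository
# inside it, and is the sort of helper a multi-repository web index builds on.
# The path and output below are invented for the example; assuming
# mercurial.util is importable:
#
#   >>> from mercurial import util
#   >>> for root in util.walkrepos('/srv/hg', followsym=True, recurse=True):
#   ...     print root
#   /srv/hg/project-a
#   /srv/hg/project-a/.hg/patches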
1140 _rcpath = None
1140 _rcpath = None
1141
1141
1142 def os_rcpath():
1142 def os_rcpath():
1143 '''return default os-specific hgrc search path'''
1143 '''return default os-specific hgrc search path'''
1144 path = system_rcpath()
1144 path = system_rcpath()
1145 path.extend(user_rcpath())
1145 path.extend(user_rcpath())
1146 path = [os.path.normpath(f) for f in path]
1146 path = [os.path.normpath(f) for f in path]
1147 return path
1147 return path
1148
1148
1149 def rcpath():
1149 def rcpath():
1150 '''return hgrc search path. if env var HGRCPATH is set, use it.
1150 '''return hgrc search path. if env var HGRCPATH is set, use it.
1151 for each item in path, if directory, use files ending in .rc,
1151 for each item in path, if directory, use files ending in .rc,
1152 else use item.
1152 else use item.
1153 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1153 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1154 if no HGRCPATH, use default os-specific path.'''
1154 if no HGRCPATH, use default os-specific path.'''
1155 global _rcpath
1155 global _rcpath
1156 if _rcpath is None:
1156 if _rcpath is None:
1157 if 'HGRCPATH' in os.environ:
1157 if 'HGRCPATH' in os.environ:
1158 _rcpath = []
1158 _rcpath = []
1159 for p in os.environ['HGRCPATH'].split(os.pathsep):
1159 for p in os.environ['HGRCPATH'].split(os.pathsep):
1160 if not p: continue
1160 if not p: continue
1161 if os.path.isdir(p):
1161 if os.path.isdir(p):
1162 for f, kind in osutil.listdir(p):
1162 for f, kind in osutil.listdir(p):
1163 if f.endswith('.rc'):
1163 if f.endswith('.rc'):
1164 _rcpath.append(os.path.join(p, f))
1164 _rcpath.append(os.path.join(p, f))
1165 else:
1165 else:
1166 _rcpath.append(p)
1166 _rcpath.append(p)
1167 else:
1167 else:
1168 _rcpath = os_rcpath()
1168 _rcpath = os_rcpath()
1169 return _rcpath
1169 return _rcpath
1170
1170
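# Illustrative sketch, not part of the original file: rcpath() caches its
# result in _rcpath, and an empty HGRCPATH deliberately yields no files at all,
# so only the repository's own .hg/hgrc is read.  In a fresh process, before
# the first rcpath() call, and assuming mercurial.util is importable:
#
#   >>> import os
#   >>> os.environ['HGRCPATH'] = ''     # ignore system and user hgrc files
#   >>> from mercurial import util
#   >>> util.rcpath()
#   []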
1171 def bytecount(nbytes):
1171 def bytecount(nbytes):
1172 '''return byte count formatted as readable string, with units'''
1172 '''return byte count formatted as readable string, with units'''
1173
1173
1174 units = (
1174 units = (
1175 (100, 1<<30, _('%.0f GB')),
1175 (100, 1<<30, _('%.0f GB')),
1176 (10, 1<<30, _('%.1f GB')),
1176 (10, 1<<30, _('%.1f GB')),
1177 (1, 1<<30, _('%.2f GB')),
1177 (1, 1<<30, _('%.2f GB')),
1178 (100, 1<<20, _('%.0f MB')),
1178 (100, 1<<20, _('%.0f MB')),
1179 (10, 1<<20, _('%.1f MB')),
1179 (10, 1<<20, _('%.1f MB')),
1180 (1, 1<<20, _('%.2f MB')),
1180 (1, 1<<20, _('%.2f MB')),
1181 (100, 1<<10, _('%.0f KB')),
1181 (100, 1<<10, _('%.0f KB')),
1182 (10, 1<<10, _('%.1f KB')),
1182 (10, 1<<10, _('%.1f KB')),
1183 (1, 1<<10, _('%.2f KB')),
1183 (1, 1<<10, _('%.2f KB')),
1184 (1, 1, _('%.0f bytes')),
1184 (1, 1, _('%.0f bytes')),
1185 )
1185 )
1186
1186
1187 for multiplier, divisor, format in units:
1187 for multiplier, divisor, format in units:
1188 if nbytes >= divisor * multiplier:
1188 if nbytes >= divisor * multiplier:
1189 return format % (nbytes / float(divisor))
1189 return format % (nbytes / float(divisor))
1190 return units[-1][2] % nbytes
1190 return units[-1][2] % nbytes
1191
1191
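# Illustrative sketch, not part of the original file: the (multiplier, divisor,
# format) table above picks the largest unit whose scaled value is still at
# least the multiplier, so smaller numbers get more decimal places:
#
#   >>> from mercurial import util
#   >>> util.bytecount(301)
#   '301 bytes'
#   >>> util.bytecount(1536)
#   '1.50 KB'
#   >>> util.bytecount(700 * 1024 * 1024)
#   '700 MB'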
1192 def drop_scheme(scheme, path):
1192 def drop_scheme(scheme, path):
1193 sc = scheme + ':'
1193 sc = scheme + ':'
1194 if path.startswith(sc):
1194 if path.startswith(sc):
1195 path = path[len(sc):]
1195 path = path[len(sc):]
1196 if path.startswith('//'):
1196 if path.startswith('//'):
1197 path = path[2:]
1197 path = path[2:]
1198 return path
1198 return path
1199
1199
1200 def uirepr(s):
1200 def uirepr(s):
1201 # Avoid double backslash in Windows path repr()
1201 # Avoid double backslash in Windows path repr()
1202 return repr(s).replace('\\\\', '\\')
1202 return repr(s).replace('\\\\', '\\')
1203
1203
1204 def termwidth():
1204 def termwidth():
1205 if 'COLUMNS' in os.environ:
1205 if 'COLUMNS' in os.environ:
1206 try:
1206 try:
1207 return int(os.environ['COLUMNS'])
1207 return int(os.environ['COLUMNS'])
1208 except ValueError:
1208 except ValueError:
1209 pass
1209 pass
1210 try:
1210 try:
1211 import termios, array, fcntl
1211 import termios, array, fcntl
1212 for dev in (sys.stdout, sys.stdin):
1212 for dev in (sys.stdout, sys.stdin):
1213 try:
1213 try:
1214 try:
1214 try:
1215 fd = dev.fileno()
1215 fd = dev.fileno()
1216 except AttributeError:
1216 except AttributeError:
1217 continue
1217 continue
1218 if not os.isatty(fd):
1218 if not os.isatty(fd):
1219 continue
1219 continue
1220 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1220 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1221 return array.array('h', arri)[1]
1221 return array.array('h', arri)[1]
1222 except ValueError:
1222 except ValueError:
1223 pass
1223 pass
1224 except IOError, e:
1225 if e[0] == errno.EINVAL:
1226 pass
1227 else:
1228 raise
1224 except ImportError:
1229 except ImportError:
1225 pass
1230 pass
1226 return 80
1231 return 80
1227
1232
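# Illustrative sketch, not part of the original file: a COLUMNS override wins
# over the TIOCGWINSZ ioctl probe, which is handy in scripts and tests.
# Assuming mercurial.util is importable:
#
#   >>> import os
#   >>> os.environ['COLUMNS'] = '100'
#   >>> from mercurial import util
#   >>> util.termwidth()
#   100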
1228 def wrap(line, hangindent, width=None):
1233 def wrap(line, hangindent, width=None):
1229 if width is None:
1234 if width is None:
1230 width = termwidth() - 2
1235 width = termwidth() - 2
1231 if width <= hangindent:
1236 if width <= hangindent:
1232 # adjust for weird terminal size
1237 # adjust for weird terminal size
1233 width = max(78, hangindent + 1)
1238 width = max(78, hangindent + 1)
1234 padding = '\n' + ' ' * hangindent
1239 padding = '\n' + ' ' * hangindent
1235 # To avoid corrupting multi-byte characters in line, we must wrap
1240 # To avoid corrupting multi-byte characters in line, we must wrap
1236 # a Unicode string instead of a bytestring.
1241 # a Unicode string instead of a bytestring.
1237 try:
1242 try:
1238 u = line.decode(encoding.encoding)
1243 u = line.decode(encoding.encoding)
1239 w = padding.join(textwrap.wrap(u, width=width - hangindent))
1244 w = padding.join(textwrap.wrap(u, width=width - hangindent))
1240 return w.encode(encoding.encoding)
1245 return w.encode(encoding.encoding)
1241 except UnicodeDecodeError:
1246 except UnicodeDecodeError:
1242 return padding.join(textwrap.wrap(line, width=width - hangindent))
1247 return padding.join(textwrap.wrap(line, width=width - hangindent))
1243
1248
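# Illustrative sketch, not part of the original file: wrap() indents every
# continuation line by hangindent and breaks at width (terminal width minus
# two by default), decoding to Unicode first so multi-byte characters are
# never split.  Assuming mercurial.util is importable:
#
#   >>> from mercurial import util
#   >>> print util.wrap('one two three four five', 4, width=14)
#   one two
#       three four
#       five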
1244 def iterlines(iterator):
1249 def iterlines(iterator):
1245 for chunk in iterator:
1250 for chunk in iterator:
1246 for line in chunk.splitlines():
1251 for line in chunk.splitlines():
1247 yield line
1252 yield line
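# Illustrative sketch, not part of the original file: iterlines() flattens an
# iterator of multi-line chunks into individual lines.  Assuming mercurial.util
# is importable:
#
#   >>> from mercurial import util
#   >>> list(util.iterlines(['a\nb\n', 'c\n']))
#   ['a', 'b', 'c']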