util: fake the builtin buffer if it's missing (jython)
Ronny Pfannschmidt
r10756:cb681cc5 default
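This changeset installs a stand-in for the builtin buffer() on interpreters that do not provide it (Jython), so read-only callers such as chunkbuffer.read() in the diff below keep working. A minimal sketch of the idea, assuming Python 2 semantics; the stand-in returns a plain slice, i.e. a copy rather than a zero-copy view:

    import __builtin__

    def fakebuffer(sliceable, offset=0):
        # same bytes as buffer(sliceable, offset), but copied instead of viewed
        return sliceable[offset:]

    if not hasattr(__builtin__, 'buffer'):  # true on Jython
        __builtin__.buffer = fakebuffer

    data = 'abcdefgh'
    assert str(buffer(data, 3)) == data[3:]  # 'defgh' with either implementation

The copy is acceptable for the usage visible in this file: chunkbuffer.read() only reads from the object it gets back from buffer().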
@@ -1,1359 +1,1366
1 # util.py - Mercurial utility functions and platform-specific implementations
1 # util.py - Mercurial utility functions and platform-specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, signal
19 import os, stat, time, calendar, textwrap, signal
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
40
41 def fakebuffer(sliceable, offset=0):
42 return sliceable[offset:]
43 if not hasattr(__builtin__, 'buffer'):
44 __builtin__.buffer = fakebuffer
45
39 import subprocess
46 import subprocess
40 closefds = os.name == 'posix'
47 closefds = os.name == 'posix'
41
48
42 def popen2(cmd, env=None, newlines=False):
49 def popen2(cmd, env=None, newlines=False):
43 # Setting bufsize to -1 lets the system decide the buffer size.
50 # Setting bufsize to -1 lets the system decide the buffer size.
44 # The default for bufsize is 0, meaning unbuffered. This leads to
51 # The default for bufsize is 0, meaning unbuffered. This leads to
45 # poor performance on Mac OS X: http://bugs.python.org/issue4194
52 # poor performance on Mac OS X: http://bugs.python.org/issue4194
46 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
53 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
47 close_fds=closefds,
54 close_fds=closefds,
48 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
55 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
49 universal_newlines=newlines,
56 universal_newlines=newlines,
50 env=env)
57 env=env)
51 return p.stdin, p.stdout
58 return p.stdin, p.stdout
52
59
53 def popen3(cmd, env=None, newlines=False):
60 def popen3(cmd, env=None, newlines=False):
54 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
61 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
55 close_fds=closefds,
62 close_fds=closefds,
56 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
63 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
57 stderr=subprocess.PIPE,
64 stderr=subprocess.PIPE,
58 universal_newlines=newlines,
65 universal_newlines=newlines,
59 env=env)
66 env=env)
60 return p.stdin, p.stdout, p.stderr
67 return p.stdin, p.stdout, p.stderr
61
68
62 def version():
69 def version():
63 """Return version information if available."""
70 """Return version information if available."""
64 try:
71 try:
65 import __version__
72 import __version__
66 return __version__.version
73 return __version__.version
67 except ImportError:
74 except ImportError:
68 return 'unknown'
75 return 'unknown'
69
76
70 # used by parsedate
77 # used by parsedate
71 defaultdateformats = (
78 defaultdateformats = (
72 '%Y-%m-%d %H:%M:%S',
79 '%Y-%m-%d %H:%M:%S',
73 '%Y-%m-%d %I:%M:%S%p',
80 '%Y-%m-%d %I:%M:%S%p',
74 '%Y-%m-%d %H:%M',
81 '%Y-%m-%d %H:%M',
75 '%Y-%m-%d %I:%M%p',
82 '%Y-%m-%d %I:%M%p',
76 '%Y-%m-%d',
83 '%Y-%m-%d',
77 '%m-%d',
84 '%m-%d',
78 '%m/%d',
85 '%m/%d',
79 '%m/%d/%y',
86 '%m/%d/%y',
80 '%m/%d/%Y',
87 '%m/%d/%Y',
81 '%a %b %d %H:%M:%S %Y',
88 '%a %b %d %H:%M:%S %Y',
82 '%a %b %d %I:%M:%S%p %Y',
89 '%a %b %d %I:%M:%S%p %Y',
83 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
90 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
84 '%b %d %H:%M:%S %Y',
91 '%b %d %H:%M:%S %Y',
85 '%b %d %I:%M:%S%p %Y',
92 '%b %d %I:%M:%S%p %Y',
86 '%b %d %H:%M:%S',
93 '%b %d %H:%M:%S',
87 '%b %d %I:%M:%S%p',
94 '%b %d %I:%M:%S%p',
88 '%b %d %H:%M',
95 '%b %d %H:%M',
89 '%b %d %I:%M%p',
96 '%b %d %I:%M%p',
90 '%b %d %Y',
97 '%b %d %Y',
91 '%b %d',
98 '%b %d',
92 '%H:%M:%S',
99 '%H:%M:%S',
93 '%I:%M:%S%p',
100 '%I:%M:%S%p',
94 '%H:%M',
101 '%H:%M',
95 '%I:%M%p',
102 '%I:%M%p',
96 )
103 )
97
104
98 extendeddateformats = defaultdateformats + (
105 extendeddateformats = defaultdateformats + (
99 "%Y",
106 "%Y",
100 "%Y-%m",
107 "%Y-%m",
101 "%b",
108 "%b",
102 "%b %Y",
109 "%b %Y",
103 )
110 )
104
111
105 def cachefunc(func):
112 def cachefunc(func):
106 '''cache the result of function calls'''
113 '''cache the result of function calls'''
107 # XXX doesn't handle keyword args
114 # XXX doesn't handle keyword args
108 cache = {}
115 cache = {}
109 if func.func_code.co_argcount == 1:
116 if func.func_code.co_argcount == 1:
110 # we gain a small amount of time because
117 # we gain a small amount of time because
111 # we don't need to pack/unpack the list
118 # we don't need to pack/unpack the list
112 def f(arg):
119 def f(arg):
113 if arg not in cache:
120 if arg not in cache:
114 cache[arg] = func(arg)
121 cache[arg] = func(arg)
115 return cache[arg]
122 return cache[arg]
116 else:
123 else:
117 def f(*args):
124 def f(*args):
118 if args not in cache:
125 if args not in cache:
119 cache[args] = func(*args)
126 cache[args] = func(*args)
120 return cache[args]
127 return cache[args]
121
128
122 return f
129 return f
123
130
124 def lrucachefunc(func):
131 def lrucachefunc(func):
125 '''cache most recent results of function calls'''
132 '''cache most recent results of function calls'''
126 cache = {}
133 cache = {}
127 order = []
134 order = []
128 if func.func_code.co_argcount == 1:
135 if func.func_code.co_argcount == 1:
129 def f(arg):
136 def f(arg):
130 if arg not in cache:
137 if arg not in cache:
131 if len(cache) > 20:
138 if len(cache) > 20:
132 del cache[order.pop(0)]
139 del cache[order.pop(0)]
133 cache[arg] = func(arg)
140 cache[arg] = func(arg)
134 else:
141 else:
135 order.remove(arg)
142 order.remove(arg)
136 order.append(arg)
143 order.append(arg)
137 return cache[arg]
144 return cache[arg]
138 else:
145 else:
139 def f(*args):
146 def f(*args):
140 if args not in cache:
147 if args not in cache:
141 if len(cache) > 20:
148 if len(cache) > 20:
142 del cache[order.pop(0)]
149 del cache[order.pop(0)]
143 cache[args] = func(*args)
150 cache[args] = func(*args)
144 else:
151 else:
145 order.remove(args)
152 order.remove(args)
146 order.append(args)
153 order.append(args)
147 return cache[args]
154 return cache[args]
148
155
149 return f
156 return f
150
157
151 class propertycache(object):
158 class propertycache(object):
152 def __init__(self, func):
159 def __init__(self, func):
153 self.func = func
160 self.func = func
154 self.name = func.__name__
161 self.name = func.__name__
155 def __get__(self, obj, type=None):
162 def __get__(self, obj, type=None):
156 result = self.func(obj)
163 result = self.func(obj)
157 setattr(obj, self.name, result)
164 setattr(obj, self.name, result)
158 return result
165 return result
159
166
160 def pipefilter(s, cmd):
167 def pipefilter(s, cmd):
161 '''filter string S through command CMD, returning its output'''
168 '''filter string S through command CMD, returning its output'''
162 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
169 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
163 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
170 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
164 pout, perr = p.communicate(s)
171 pout, perr = p.communicate(s)
165 return pout
172 return pout
166
173
167 def tempfilter(s, cmd):
174 def tempfilter(s, cmd):
168 '''filter string S through a pair of temporary files with CMD.
175 '''filter string S through a pair of temporary files with CMD.
169 CMD is used as a template to create the real command to be run,
176 CMD is used as a template to create the real command to be run,
170 with the strings INFILE and OUTFILE replaced by the real names of
177 with the strings INFILE and OUTFILE replaced by the real names of
171 the temporary files generated.'''
178 the temporary files generated.'''
172 inname, outname = None, None
179 inname, outname = None, None
173 try:
180 try:
174 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
181 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
175 fp = os.fdopen(infd, 'wb')
182 fp = os.fdopen(infd, 'wb')
176 fp.write(s)
183 fp.write(s)
177 fp.close()
184 fp.close()
178 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
185 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
179 os.close(outfd)
186 os.close(outfd)
180 cmd = cmd.replace('INFILE', inname)
187 cmd = cmd.replace('INFILE', inname)
181 cmd = cmd.replace('OUTFILE', outname)
188 cmd = cmd.replace('OUTFILE', outname)
182 code = os.system(cmd)
189 code = os.system(cmd)
183 if sys.platform == 'OpenVMS' and code & 1:
190 if sys.platform == 'OpenVMS' and code & 1:
184 code = 0
191 code = 0
185 if code:
192 if code:
186 raise Abort(_("command '%s' failed: %s") %
193 raise Abort(_("command '%s' failed: %s") %
187 (cmd, explain_exit(code)))
194 (cmd, explain_exit(code)))
188 return open(outname, 'rb').read()
195 return open(outname, 'rb').read()
189 finally:
196 finally:
190 try:
197 try:
191 if inname:
198 if inname:
192 os.unlink(inname)
199 os.unlink(inname)
193 except:
200 except:
194 pass
201 pass
195 try:
202 try:
196 if outname:
203 if outname:
197 os.unlink(outname)
204 os.unlink(outname)
198 except:
205 except:
199 pass
206 pass
200
207
201 filtertable = {
208 filtertable = {
202 'tempfile:': tempfilter,
209 'tempfile:': tempfilter,
203 'pipe:': pipefilter,
210 'pipe:': pipefilter,
204 }
211 }
205
212
206 def filter(s, cmd):
213 def filter(s, cmd):
207 "filter a string through a command that transforms its input to its output"
214 "filter a string through a command that transforms its input to its output"
208 for name, fn in filtertable.iteritems():
215 for name, fn in filtertable.iteritems():
209 if cmd.startswith(name):
216 if cmd.startswith(name):
210 return fn(s, cmd[len(name):].lstrip())
217 return fn(s, cmd[len(name):].lstrip())
211 return pipefilter(s, cmd)
218 return pipefilter(s, cmd)
212
219
213 def binary(s):
220 def binary(s):
214 """return true if a string is binary data"""
221 """return true if a string is binary data"""
215 return bool(s and '\0' in s)
222 return bool(s and '\0' in s)
216
223
217 def increasingchunks(source, min=1024, max=65536):
224 def increasingchunks(source, min=1024, max=65536):
218 '''return no less than min bytes per chunk while data remains,
225 '''return no less than min bytes per chunk while data remains,
219 doubling min after each chunk until it reaches max'''
226 doubling min after each chunk until it reaches max'''
220 def log2(x):
227 def log2(x):
221 if not x:
228 if not x:
222 return 0
229 return 0
223 i = 0
230 i = 0
224 while x:
231 while x:
225 x >>= 1
232 x >>= 1
226 i += 1
233 i += 1
227 return i - 1
234 return i - 1
228
235
229 buf = []
236 buf = []
230 blen = 0
237 blen = 0
231 for chunk in source:
238 for chunk in source:
232 buf.append(chunk)
239 buf.append(chunk)
233 blen += len(chunk)
240 blen += len(chunk)
234 if blen >= min:
241 if blen >= min:
235 if min < max:
242 if min < max:
236 min = min << 1
243 min = min << 1
237 nmin = 1 << log2(blen)
244 nmin = 1 << log2(blen)
238 if nmin > min:
245 if nmin > min:
239 min = nmin
246 min = nmin
240 if min > max:
247 if min > max:
241 min = max
248 min = max
242 yield ''.join(buf)
249 yield ''.join(buf)
243 blen = 0
250 blen = 0
244 buf = []
251 buf = []
245 if buf:
252 if buf:
246 yield ''.join(buf)
253 yield ''.join(buf)
247
254
248 Abort = error.Abort
255 Abort = error.Abort
249
256
250 def always(fn):
257 def always(fn):
251 return True
258 return True
252
259
253 def never(fn):
260 def never(fn):
254 return False
261 return False
255
262
256 def pathto(root, n1, n2):
263 def pathto(root, n1, n2):
257 '''return the relative path from one place to another.
264 '''return the relative path from one place to another.
258 root should use os.sep to separate directories
265 root should use os.sep to separate directories
259 n1 should use os.sep to separate directories
266 n1 should use os.sep to separate directories
260 n2 should use "/" to separate directories
267 n2 should use "/" to separate directories
261 returns an os.sep-separated path.
268 returns an os.sep-separated path.
262
269
263 If n1 is a relative path, it's assumed it's
270 If n1 is a relative path, it's assumed it's
264 relative to root.
271 relative to root.
265 n2 should always be relative to root.
272 n2 should always be relative to root.
266 '''
273 '''
267 if not n1:
274 if not n1:
268 return localpath(n2)
275 return localpath(n2)
269 if os.path.isabs(n1):
276 if os.path.isabs(n1):
270 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
277 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
271 return os.path.join(root, localpath(n2))
278 return os.path.join(root, localpath(n2))
272 n2 = '/'.join((pconvert(root), n2))
279 n2 = '/'.join((pconvert(root), n2))
273 a, b = splitpath(n1), n2.split('/')
280 a, b = splitpath(n1), n2.split('/')
274 a.reverse()
281 a.reverse()
275 b.reverse()
282 b.reverse()
276 while a and b and a[-1] == b[-1]:
283 while a and b and a[-1] == b[-1]:
277 a.pop()
284 a.pop()
278 b.pop()
285 b.pop()
279 b.reverse()
286 b.reverse()
280 return os.sep.join((['..'] * len(a)) + b) or '.'
287 return os.sep.join((['..'] * len(a)) + b) or '.'
281
288
282 def canonpath(root, cwd, myname):
289 def canonpath(root, cwd, myname):
283 """return the canonical path of myname, given cwd and root"""
290 """return the canonical path of myname, given cwd and root"""
284 if endswithsep(root):
291 if endswithsep(root):
285 rootsep = root
292 rootsep = root
286 else:
293 else:
287 rootsep = root + os.sep
294 rootsep = root + os.sep
288 name = myname
295 name = myname
289 if not os.path.isabs(name):
296 if not os.path.isabs(name):
290 name = os.path.join(root, cwd, name)
297 name = os.path.join(root, cwd, name)
291 name = os.path.normpath(name)
298 name = os.path.normpath(name)
292 audit_path = path_auditor(root)
299 audit_path = path_auditor(root)
293 if name != rootsep and name.startswith(rootsep):
300 if name != rootsep and name.startswith(rootsep):
294 name = name[len(rootsep):]
301 name = name[len(rootsep):]
295 audit_path(name)
302 audit_path(name)
296 return pconvert(name)
303 return pconvert(name)
297 elif name == root:
304 elif name == root:
298 return ''
305 return ''
299 else:
306 else:
300 # Determine whether `name' is in the hierarchy at or beneath `root',
307 # Determine whether `name' is in the hierarchy at or beneath `root',
301 # by iterating name=dirname(name) until that causes no change (can't
308 # by iterating name=dirname(name) until that causes no change (can't
302 # check name == '/', because that doesn't work on windows). For each
309 # check name == '/', because that doesn't work on windows). For each
303 # `name', compare dev/inode numbers. If they match, the list `rel'
310 # `name', compare dev/inode numbers. If they match, the list `rel'
304 # holds the reversed list of components making up the relative file
311 # holds the reversed list of components making up the relative file
305 # name we want.
312 # name we want.
306 root_st = os.stat(root)
313 root_st = os.stat(root)
307 rel = []
314 rel = []
308 while True:
315 while True:
309 try:
316 try:
310 name_st = os.stat(name)
317 name_st = os.stat(name)
311 except OSError:
318 except OSError:
312 break
319 break
313 if samestat(name_st, root_st):
320 if samestat(name_st, root_st):
314 if not rel:
321 if not rel:
315 # name was actually the same as root (maybe a symlink)
322 # name was actually the same as root (maybe a symlink)
316 return ''
323 return ''
317 rel.reverse()
324 rel.reverse()
318 name = os.path.join(*rel)
325 name = os.path.join(*rel)
319 audit_path(name)
326 audit_path(name)
320 return pconvert(name)
327 return pconvert(name)
321 dirname, basename = os.path.split(name)
328 dirname, basename = os.path.split(name)
322 rel.append(basename)
329 rel.append(basename)
323 if dirname == name:
330 if dirname == name:
324 break
331 break
325 name = dirname
332 name = dirname
326
333
327 raise Abort('%s not under root' % myname)
334 raise Abort('%s not under root' % myname)
328
335
329 _hgexecutable = None
336 _hgexecutable = None
330
337
331 def main_is_frozen():
338 def main_is_frozen():
332 """return True if we are a frozen executable.
339 """return True if we are a frozen executable.
333
340
334 The code supports py2exe (most common, Windows only) and tools/freeze
341 The code supports py2exe (most common, Windows only) and tools/freeze
335 (portable, not much used).
342 (portable, not much used).
336 """
343 """
337 return (hasattr(sys, "frozen") or # new py2exe
344 return (hasattr(sys, "frozen") or # new py2exe
338 hasattr(sys, "importers") or # old py2exe
345 hasattr(sys, "importers") or # old py2exe
339 imp.is_frozen("__main__")) # tools/freeze
346 imp.is_frozen("__main__")) # tools/freeze
340
347
341 def hgexecutable():
348 def hgexecutable():
342 """return location of the 'hg' executable.
349 """return location of the 'hg' executable.
343
350
344 Defaults to $HG or 'hg' in the search path.
351 Defaults to $HG or 'hg' in the search path.
345 """
352 """
346 if _hgexecutable is None:
353 if _hgexecutable is None:
347 hg = os.environ.get('HG')
354 hg = os.environ.get('HG')
348 if hg:
355 if hg:
349 set_hgexecutable(hg)
356 set_hgexecutable(hg)
350 elif main_is_frozen():
357 elif main_is_frozen():
351 set_hgexecutable(sys.executable)
358 set_hgexecutable(sys.executable)
352 else:
359 else:
353 exe = find_exe('hg') or os.path.basename(sys.argv[0])
360 exe = find_exe('hg') or os.path.basename(sys.argv[0])
354 set_hgexecutable(exe)
361 set_hgexecutable(exe)
355 return _hgexecutable
362 return _hgexecutable
356
363
357 def set_hgexecutable(path):
364 def set_hgexecutable(path):
358 """set location of the 'hg' executable"""
365 """set location of the 'hg' executable"""
359 global _hgexecutable
366 global _hgexecutable
360 _hgexecutable = path
367 _hgexecutable = path
361
368
362 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
369 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
363 '''enhanced shell command execution.
370 '''enhanced shell command execution.
364 run with environment maybe modified, maybe in different dir.
371 run with environment maybe modified, maybe in different dir.
365
372
366 if command fails and onerr is None, return status. if ui object,
373 if command fails and onerr is None, return status. if ui object,
367 print error message and return status, else raise onerr object as
374 print error message and return status, else raise onerr object as
368 exception.'''
375 exception.'''
369 def py2shell(val):
376 def py2shell(val):
370 'convert python object into string that is useful to shell'
377 'convert python object into string that is useful to shell'
371 if val is None or val is False:
378 if val is None or val is False:
372 return '0'
379 return '0'
373 if val is True:
380 if val is True:
374 return '1'
381 return '1'
375 return str(val)
382 return str(val)
376 origcmd = cmd
383 origcmd = cmd
377 if os.name == 'nt':
384 if os.name == 'nt':
378 cmd = '"%s"' % cmd
385 cmd = '"%s"' % cmd
379 env = dict(os.environ)
386 env = dict(os.environ)
380 env.update((k, py2shell(v)) for k, v in environ.iteritems())
387 env.update((k, py2shell(v)) for k, v in environ.iteritems())
381 env['HG'] = hgexecutable()
388 env['HG'] = hgexecutable()
382 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
389 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
383 env=env, cwd=cwd)
390 env=env, cwd=cwd)
384 if sys.platform == 'OpenVMS' and rc & 1:
391 if sys.platform == 'OpenVMS' and rc & 1:
385 rc = 0
392 rc = 0
386 if rc and onerr:
393 if rc and onerr:
387 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
394 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
388 explain_exit(rc)[0])
395 explain_exit(rc)[0])
389 if errprefix:
396 if errprefix:
390 errmsg = '%s: %s' % (errprefix, errmsg)
397 errmsg = '%s: %s' % (errprefix, errmsg)
391 try:
398 try:
392 onerr.warn(errmsg + '\n')
399 onerr.warn(errmsg + '\n')
393 except AttributeError:
400 except AttributeError:
394 raise onerr(errmsg)
401 raise onerr(errmsg)
395 return rc
402 return rc
396
403
397 def checksignature(func):
404 def checksignature(func):
398 '''wrap a function with code to check for calling errors'''
405 '''wrap a function with code to check for calling errors'''
399 def check(*args, **kwargs):
406 def check(*args, **kwargs):
400 try:
407 try:
401 return func(*args, **kwargs)
408 return func(*args, **kwargs)
402 except TypeError:
409 except TypeError:
403 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
410 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
404 raise error.SignatureError
411 raise error.SignatureError
405 raise
412 raise
406
413
407 return check
414 return check
408
415
409 # os.path.lexists is not available on python2.3
416 # os.path.lexists is not available on python2.3
410 def lexists(filename):
417 def lexists(filename):
411 "test whether a file with this name exists. does not follow symlinks"
418 "test whether a file with this name exists. does not follow symlinks"
412 try:
419 try:
413 os.lstat(filename)
420 os.lstat(filename)
414 except:
421 except:
415 return False
422 return False
416 return True
423 return True
417
424
418 def unlink(f):
425 def unlink(f):
419 """unlink and remove the directory if it is empty"""
426 """unlink and remove the directory if it is empty"""
420 os.unlink(f)
427 os.unlink(f)
421 # try removing directories that might now be empty
428 # try removing directories that might now be empty
422 try:
429 try:
423 os.removedirs(os.path.dirname(f))
430 os.removedirs(os.path.dirname(f))
424 except OSError:
431 except OSError:
425 pass
432 pass
426
433
427 def copyfile(src, dest):
434 def copyfile(src, dest):
428 "copy a file, preserving mode and atime/mtime"
435 "copy a file, preserving mode and atime/mtime"
429 if os.path.islink(src):
436 if os.path.islink(src):
430 try:
437 try:
431 os.unlink(dest)
438 os.unlink(dest)
432 except:
439 except:
433 pass
440 pass
434 os.symlink(os.readlink(src), dest)
441 os.symlink(os.readlink(src), dest)
435 else:
442 else:
436 try:
443 try:
437 shutil.copyfile(src, dest)
444 shutil.copyfile(src, dest)
438 shutil.copystat(src, dest)
445 shutil.copystat(src, dest)
439 except shutil.Error, inst:
446 except shutil.Error, inst:
440 raise Abort(str(inst))
447 raise Abort(str(inst))
441
448
442 def copyfiles(src, dst, hardlink=None):
449 def copyfiles(src, dst, hardlink=None):
443 """Copy a directory tree using hardlinks if possible"""
450 """Copy a directory tree using hardlinks if possible"""
444
451
445 if hardlink is None:
452 if hardlink is None:
446 hardlink = (os.stat(src).st_dev ==
453 hardlink = (os.stat(src).st_dev ==
447 os.stat(os.path.dirname(dst)).st_dev)
454 os.stat(os.path.dirname(dst)).st_dev)
448
455
449 if os.path.isdir(src):
456 if os.path.isdir(src):
450 os.mkdir(dst)
457 os.mkdir(dst)
451 for name, kind in osutil.listdir(src):
458 for name, kind in osutil.listdir(src):
452 srcname = os.path.join(src, name)
459 srcname = os.path.join(src, name)
453 dstname = os.path.join(dst, name)
460 dstname = os.path.join(dst, name)
454 copyfiles(srcname, dstname, hardlink)
461 copyfiles(srcname, dstname, hardlink)
455 else:
462 else:
456 if hardlink:
463 if hardlink:
457 try:
464 try:
458 os_link(src, dst)
465 os_link(src, dst)
459 except (IOError, OSError):
466 except (IOError, OSError):
460 hardlink = False
467 hardlink = False
461 shutil.copy(src, dst)
468 shutil.copy(src, dst)
462 else:
469 else:
463 shutil.copy(src, dst)
470 shutil.copy(src, dst)
464
471
465 class path_auditor(object):
472 class path_auditor(object):
466 '''ensure that a filesystem path contains no banned components.
473 '''ensure that a filesystem path contains no banned components.
467 the following properties of a path are checked:
474 the following properties of a path are checked:
468
475
469 - under top-level .hg
476 - under top-level .hg
470 - starts at the root of a windows drive
477 - starts at the root of a windows drive
471 - contains ".."
478 - contains ".."
472 - traverses a symlink (e.g. a/symlink_here/b)
479 - traverses a symlink (e.g. a/symlink_here/b)
473 - inside a nested repository'''
480 - inside a nested repository'''
474
481
475 def __init__(self, root):
482 def __init__(self, root):
476 self.audited = set()
483 self.audited = set()
477 self.auditeddir = set()
484 self.auditeddir = set()
478 self.root = root
485 self.root = root
479
486
480 def __call__(self, path):
487 def __call__(self, path):
481 if path in self.audited:
488 if path in self.audited:
482 return
489 return
483 normpath = os.path.normcase(path)
490 normpath = os.path.normcase(path)
484 parts = splitpath(normpath)
491 parts = splitpath(normpath)
485 if (os.path.splitdrive(path)[0]
492 if (os.path.splitdrive(path)[0]
486 or parts[0].lower() in ('.hg', '.hg.', '')
493 or parts[0].lower() in ('.hg', '.hg.', '')
487 or os.pardir in parts):
494 or os.pardir in parts):
488 raise Abort(_("path contains illegal component: %s") % path)
495 raise Abort(_("path contains illegal component: %s") % path)
489 if '.hg' in path.lower():
496 if '.hg' in path.lower():
490 lparts = [p.lower() for p in parts]
497 lparts = [p.lower() for p in parts]
491 for p in '.hg', '.hg.':
498 for p in '.hg', '.hg.':
492 if p in lparts[1:]:
499 if p in lparts[1:]:
493 pos = lparts.index(p)
500 pos = lparts.index(p)
494 base = os.path.join(*parts[:pos])
501 base = os.path.join(*parts[:pos])
495 raise Abort(_('path %r is inside repo %r') % (path, base))
502 raise Abort(_('path %r is inside repo %r') % (path, base))
496 def check(prefix):
503 def check(prefix):
497 curpath = os.path.join(self.root, prefix)
504 curpath = os.path.join(self.root, prefix)
498 try:
505 try:
499 st = os.lstat(curpath)
506 st = os.lstat(curpath)
500 except OSError, err:
507 except OSError, err:
501 # EINVAL can be raised as invalid path syntax under win32.
508 # EINVAL can be raised as invalid path syntax under win32.
502 # They must be ignored because patterns can be checked too.
509 # They must be ignored because patterns can be checked too.
503 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
510 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
504 raise
511 raise
505 else:
512 else:
506 if stat.S_ISLNK(st.st_mode):
513 if stat.S_ISLNK(st.st_mode):
507 raise Abort(_('path %r traverses symbolic link %r') %
514 raise Abort(_('path %r traverses symbolic link %r') %
508 (path, prefix))
515 (path, prefix))
509 elif (stat.S_ISDIR(st.st_mode) and
516 elif (stat.S_ISDIR(st.st_mode) and
510 os.path.isdir(os.path.join(curpath, '.hg'))):
517 os.path.isdir(os.path.join(curpath, '.hg'))):
511 raise Abort(_('path %r is inside repo %r') %
518 raise Abort(_('path %r is inside repo %r') %
512 (path, prefix))
519 (path, prefix))
513 parts.pop()
520 parts.pop()
514 prefixes = []
521 prefixes = []
515 while parts:
522 while parts:
516 prefix = os.sep.join(parts)
523 prefix = os.sep.join(parts)
517 if prefix in self.auditeddir:
524 if prefix in self.auditeddir:
518 break
525 break
519 check(prefix)
526 check(prefix)
520 prefixes.append(prefix)
527 prefixes.append(prefix)
521 parts.pop()
528 parts.pop()
522
529
523 self.audited.add(path)
530 self.audited.add(path)
524 # only add prefixes to the cache after checking everything: we don't
531 # only add prefixes to the cache after checking everything: we don't
525 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
532 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
526 self.auditeddir.update(prefixes)
533 self.auditeddir.update(prefixes)
527
534
528 def nlinks(pathname):
535 def nlinks(pathname):
529 """Return number of hardlinks for the given file."""
536 """Return number of hardlinks for the given file."""
530 return os.lstat(pathname).st_nlink
537 return os.lstat(pathname).st_nlink
531
538
532 if hasattr(os, 'link'):
539 if hasattr(os, 'link'):
533 os_link = os.link
540 os_link = os.link
534 else:
541 else:
535 def os_link(src, dst):
542 def os_link(src, dst):
536 raise OSError(0, _("Hardlinks not supported"))
543 raise OSError(0, _("Hardlinks not supported"))
537
544
538 def lookup_reg(key, name=None, scope=None):
545 def lookup_reg(key, name=None, scope=None):
539 return None
546 return None
540
547
541 def hidewindow():
548 def hidewindow():
542 """Hide current shell window.
549 """Hide current shell window.
543
550
544 Used to hide the window opened when starting asynchronous
551 Used to hide the window opened when starting asynchronous
545 child process under Windows, unneeded on other systems.
552 child process under Windows, unneeded on other systems.
546 """
553 """
547 pass
554 pass
548
555
549 if os.name == 'nt':
556 if os.name == 'nt':
550 from windows import *
557 from windows import *
551 else:
558 else:
552 from posix import *
559 from posix import *
553
560
554 def makelock(info, pathname):
561 def makelock(info, pathname):
555 try:
562 try:
556 return os.symlink(info, pathname)
563 return os.symlink(info, pathname)
557 except OSError, why:
564 except OSError, why:
558 if why.errno == errno.EEXIST:
565 if why.errno == errno.EEXIST:
559 raise
566 raise
560 except AttributeError: # no symlink in os
567 except AttributeError: # no symlink in os
561 pass
568 pass
562
569
563 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
570 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
564 os.write(ld, info)
571 os.write(ld, info)
565 os.close(ld)
572 os.close(ld)
566
573
567 def readlock(pathname):
574 def readlock(pathname):
568 try:
575 try:
569 return os.readlink(pathname)
576 return os.readlink(pathname)
570 except OSError, why:
577 except OSError, why:
571 if why.errno not in (errno.EINVAL, errno.ENOSYS):
578 if why.errno not in (errno.EINVAL, errno.ENOSYS):
572 raise
579 raise
573 except AttributeError: # no symlink in os
580 except AttributeError: # no symlink in os
574 pass
581 pass
575 return posixfile(pathname).read()
582 return posixfile(pathname).read()
576
583
577 def fstat(fp):
584 def fstat(fp):
578 '''stat file object that may not have fileno method.'''
585 '''stat file object that may not have fileno method.'''
579 try:
586 try:
580 return os.fstat(fp.fileno())
587 return os.fstat(fp.fileno())
581 except AttributeError:
588 except AttributeError:
582 return os.stat(fp.name)
589 return os.stat(fp.name)
583
590
584 # File system features
591 # File system features
585
592
586 def checkcase(path):
593 def checkcase(path):
587 """
594 """
588 Check whether the given path is on a case-sensitive filesystem
595 Check whether the given path is on a case-sensitive filesystem
589
596
590 Requires a path (like /foo/.hg) ending with a foldable final
597 Requires a path (like /foo/.hg) ending with a foldable final
591 directory component.
598 directory component.
592 """
599 """
593 s1 = os.stat(path)
600 s1 = os.stat(path)
594 d, b = os.path.split(path)
601 d, b = os.path.split(path)
595 p2 = os.path.join(d, b.upper())
602 p2 = os.path.join(d, b.upper())
596 if path == p2:
603 if path == p2:
597 p2 = os.path.join(d, b.lower())
604 p2 = os.path.join(d, b.lower())
598 try:
605 try:
599 s2 = os.stat(p2)
606 s2 = os.stat(p2)
600 if s2 == s1:
607 if s2 == s1:
601 return False
608 return False
602 return True
609 return True
603 except:
610 except:
604 return True
611 return True
605
612
606 _fspathcache = {}
613 _fspathcache = {}
607 def fspath(name, root):
614 def fspath(name, root):
608 '''Get name in the case stored in the filesystem
615 '''Get name in the case stored in the filesystem
609
616
610 The name is either relative to root, or it is an absolute path starting
617 The name is either relative to root, or it is an absolute path starting
611 with root. Note that this function is unnecessary, and should not be
618 with root. Note that this function is unnecessary, and should not be
612 called, for case-sensitive filesystems (simply because it's expensive).
619 called, for case-sensitive filesystems (simply because it's expensive).
613 '''
620 '''
614 # If name is absolute, make it relative
621 # If name is absolute, make it relative
615 if name.lower().startswith(root.lower()):
622 if name.lower().startswith(root.lower()):
616 l = len(root)
623 l = len(root)
617 if name[l] == os.sep or name[l] == os.altsep:
624 if name[l] == os.sep or name[l] == os.altsep:
618 l = l + 1
625 l = l + 1
619 name = name[l:]
626 name = name[l:]
620
627
621 if not os.path.exists(os.path.join(root, name)):
628 if not os.path.exists(os.path.join(root, name)):
622 return None
629 return None
623
630
624 seps = os.sep
631 seps = os.sep
625 if os.altsep:
632 if os.altsep:
626 seps = seps + os.altsep
633 seps = seps + os.altsep
627 # Protect backslashes. This gets silly very quickly.
634 # Protect backslashes. This gets silly very quickly.
628 seps = seps.replace('\\','\\\\')
635 seps = seps.replace('\\','\\\\')
629 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
636 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
630 dir = os.path.normcase(os.path.normpath(root))
637 dir = os.path.normcase(os.path.normpath(root))
631 result = []
638 result = []
632 for part, sep in pattern.findall(name):
639 for part, sep in pattern.findall(name):
633 if sep:
640 if sep:
634 result.append(sep)
641 result.append(sep)
635 continue
642 continue
636
643
637 if dir not in _fspathcache:
644 if dir not in _fspathcache:
638 _fspathcache[dir] = os.listdir(dir)
645 _fspathcache[dir] = os.listdir(dir)
639 contents = _fspathcache[dir]
646 contents = _fspathcache[dir]
640
647
641 lpart = part.lower()
648 lpart = part.lower()
642 lenp = len(part)
649 lenp = len(part)
643 for n in contents:
650 for n in contents:
644 if lenp == len(n) and n.lower() == lpart:
651 if lenp == len(n) and n.lower() == lpart:
645 result.append(n)
652 result.append(n)
646 break
653 break
647 else:
654 else:
648 # Cannot happen, as the file exists!
655 # Cannot happen, as the file exists!
649 result.append(part)
656 result.append(part)
650 dir = os.path.join(dir, lpart)
657 dir = os.path.join(dir, lpart)
651
658
652 return ''.join(result)
659 return ''.join(result)
653
660
654 def checkexec(path):
661 def checkexec(path):
655 """
662 """
656 Check whether the given path is on a filesystem with UNIX-like exec flags
663 Check whether the given path is on a filesystem with UNIX-like exec flags
657
664
658 Requires a directory (like /foo/.hg)
665 Requires a directory (like /foo/.hg)
659 """
666 """
660
667
661 # VFAT on some Linux versions can flip mode but it doesn't persist
668 # VFAT on some Linux versions can flip mode but it doesn't persist
662 # across a FS remount. Frequently we can detect it if files are created
669 # across a FS remount. Frequently we can detect it if files are created
663 # with exec bit on.
670 # with exec bit on.
664
671
665 try:
672 try:
666 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
673 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
667 fh, fn = tempfile.mkstemp("", "", path)
674 fh, fn = tempfile.mkstemp("", "", path)
668 try:
675 try:
669 os.close(fh)
676 os.close(fh)
670 m = os.stat(fn).st_mode & 0777
677 m = os.stat(fn).st_mode & 0777
671 new_file_has_exec = m & EXECFLAGS
678 new_file_has_exec = m & EXECFLAGS
672 os.chmod(fn, m ^ EXECFLAGS)
679 os.chmod(fn, m ^ EXECFLAGS)
673 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
680 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
674 finally:
681 finally:
675 os.unlink(fn)
682 os.unlink(fn)
676 except (IOError, OSError):
683 except (IOError, OSError):
677 # we don't care, the user probably won't be able to commit anyway
684 # we don't care, the user probably won't be able to commit anyway
678 return False
685 return False
679 return not (new_file_has_exec or exec_flags_cannot_flip)
686 return not (new_file_has_exec or exec_flags_cannot_flip)
680
687
681 def checklink(path):
688 def checklink(path):
682 """check whether the given path is on a symlink-capable filesystem"""
689 """check whether the given path is on a symlink-capable filesystem"""
683 # mktemp is not racy because symlink creation will fail if the
690 # mktemp is not racy because symlink creation will fail if the
684 # file already exists
691 # file already exists
685 name = tempfile.mktemp(dir=path)
692 name = tempfile.mktemp(dir=path)
686 try:
693 try:
687 os.symlink(".", name)
694 os.symlink(".", name)
688 os.unlink(name)
695 os.unlink(name)
689 return True
696 return True
690 except (OSError, AttributeError):
697 except (OSError, AttributeError):
691 return False
698 return False
692
699
693 def needbinarypatch():
700 def needbinarypatch():
694 """return True if patches should be applied in binary mode by default."""
701 """return True if patches should be applied in binary mode by default."""
695 return os.name == 'nt'
702 return os.name == 'nt'
696
703
697 def endswithsep(path):
704 def endswithsep(path):
698 '''Check path ends with os.sep or os.altsep.'''
705 '''Check path ends with os.sep or os.altsep.'''
699 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
706 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
700
707
701 def splitpath(path):
708 def splitpath(path):
702 '''Split path by os.sep.
709 '''Split path by os.sep.
703 Note that this function does not use os.altsep because this is
710 Note that this function does not use os.altsep because this is
704 an alternative to a simple "xxx.split(os.sep)".
711 an alternative to a simple "xxx.split(os.sep)".
705 It is recommended to use os.path.normpath() before using this
712 It is recommended to use os.path.normpath() before using this
706 function if needed.'''
713 function if needed.'''
707 return path.split(os.sep)
714 return path.split(os.sep)
708
715
709 def gui():
716 def gui():
710 '''Are we running in a GUI?'''
717 '''Are we running in a GUI?'''
711 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
718 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
712
719
713 def mktempcopy(name, emptyok=False, createmode=None):
720 def mktempcopy(name, emptyok=False, createmode=None):
714 """Create a temporary file with the same contents from name
721 """Create a temporary file with the same contents from name
715
722
716 The permission bits are copied from the original file.
723 The permission bits are copied from the original file.
717
724
718 If the temporary file is going to be truncated immediately, you
725 If the temporary file is going to be truncated immediately, you
719 can use emptyok=True as an optimization.
726 can use emptyok=True as an optimization.
720
727
721 Returns the name of the temporary file.
728 Returns the name of the temporary file.
722 """
729 """
723 d, fn = os.path.split(name)
730 d, fn = os.path.split(name)
724 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
731 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
725 os.close(fd)
732 os.close(fd)
726 # Temporary files are created with mode 0600, which is usually not
733 # Temporary files are created with mode 0600, which is usually not
727 # what we want. If the original file already exists, just copy
734 # what we want. If the original file already exists, just copy
728 # its mode. Otherwise, manually obey umask.
735 # its mode. Otherwise, manually obey umask.
729 try:
736 try:
730 st_mode = os.lstat(name).st_mode & 0777
737 st_mode = os.lstat(name).st_mode & 0777
731 except OSError, inst:
738 except OSError, inst:
732 if inst.errno != errno.ENOENT:
739 if inst.errno != errno.ENOENT:
733 raise
740 raise
734 st_mode = createmode
741 st_mode = createmode
735 if st_mode is None:
742 if st_mode is None:
736 st_mode = ~umask
743 st_mode = ~umask
737 st_mode &= 0666
744 st_mode &= 0666
738 os.chmod(temp, st_mode)
745 os.chmod(temp, st_mode)
739 if emptyok:
746 if emptyok:
740 return temp
747 return temp
741 try:
748 try:
742 try:
749 try:
743 ifp = posixfile(name, "rb")
750 ifp = posixfile(name, "rb")
744 except IOError, inst:
751 except IOError, inst:
745 if inst.errno == errno.ENOENT:
752 if inst.errno == errno.ENOENT:
746 return temp
753 return temp
747 if not getattr(inst, 'filename', None):
754 if not getattr(inst, 'filename', None):
748 inst.filename = name
755 inst.filename = name
749 raise
756 raise
750 ofp = posixfile(temp, "wb")
757 ofp = posixfile(temp, "wb")
751 for chunk in filechunkiter(ifp):
758 for chunk in filechunkiter(ifp):
752 ofp.write(chunk)
759 ofp.write(chunk)
753 ifp.close()
760 ifp.close()
754 ofp.close()
761 ofp.close()
755 except:
762 except:
756 try: os.unlink(temp)
763 try: os.unlink(temp)
757 except: pass
764 except: pass
758 raise
765 raise
759 return temp
766 return temp
760
767
761 class atomictempfile(object):
768 class atomictempfile(object):
762 """file-like object that atomically updates a file
769 """file-like object that atomically updates a file
763
770
764 All writes will be redirected to a temporary copy of the original
771 All writes will be redirected to a temporary copy of the original
765 file. When rename is called, the copy is renamed to the original
772 file. When rename is called, the copy is renamed to the original
766 name, making the changes visible.
773 name, making the changes visible.
767 """
774 """
768 def __init__(self, name, mode, createmode):
775 def __init__(self, name, mode, createmode):
769 self.__name = name
776 self.__name = name
770 self._fp = None
777 self._fp = None
771 self.temp = mktempcopy(name, emptyok=('w' in mode),
778 self.temp = mktempcopy(name, emptyok=('w' in mode),
772 createmode=createmode)
779 createmode=createmode)
773 self._fp = posixfile(self.temp, mode)
780 self._fp = posixfile(self.temp, mode)
774
781
775 def __getattr__(self, name):
782 def __getattr__(self, name):
776 return getattr(self._fp, name)
783 return getattr(self._fp, name)
777
784
778 def rename(self):
785 def rename(self):
779 if not self._fp.closed:
786 if not self._fp.closed:
780 self._fp.close()
787 self._fp.close()
781 rename(self.temp, localpath(self.__name))
788 rename(self.temp, localpath(self.__name))
782
789
783 def __del__(self):
790 def __del__(self):
784 if not self._fp:
791 if not self._fp:
785 return
792 return
786 if not self._fp.closed:
793 if not self._fp.closed:
787 try:
794 try:
788 os.unlink(self.temp)
795 os.unlink(self.temp)
789 except: pass
796 except: pass
790 self._fp.close()
797 self._fp.close()
791
798
792 def makedirs(name, mode=None):
799 def makedirs(name, mode=None):
793 """recursive directory creation with parent mode inheritance"""
800 """recursive directory creation with parent mode inheritance"""
794 try:
801 try:
795 os.mkdir(name)
802 os.mkdir(name)
796 if mode is not None:
803 if mode is not None:
797 os.chmod(name, mode)
804 os.chmod(name, mode)
798 return
805 return
799 except OSError, err:
806 except OSError, err:
800 if err.errno == errno.EEXIST:
807 if err.errno == errno.EEXIST:
801 return
808 return
802 if err.errno != errno.ENOENT:
809 if err.errno != errno.ENOENT:
803 raise
810 raise
804 parent = os.path.abspath(os.path.dirname(name))
811 parent = os.path.abspath(os.path.dirname(name))
805 makedirs(parent, mode)
812 makedirs(parent, mode)
806 makedirs(name, mode)
813 makedirs(name, mode)
807
814
808 class opener(object):
815 class opener(object):
809 """Open files relative to a base directory
816 """Open files relative to a base directory
810
817
811 This class is used to hide the details of COW semantics and
818 This class is used to hide the details of COW semantics and
812 remote file access from higher level code.
819 remote file access from higher level code.
813 """
820 """
814 def __init__(self, base, audit=True):
821 def __init__(self, base, audit=True):
815 self.base = base
822 self.base = base
816 if audit:
823 if audit:
817 self.audit_path = path_auditor(base)
824 self.audit_path = path_auditor(base)
818 else:
825 else:
819 self.audit_path = always
826 self.audit_path = always
820 self.createmode = None
827 self.createmode = None
821
828
822 @propertycache
829 @propertycache
823 def _can_symlink(self):
830 def _can_symlink(self):
824 return checklink(self.base)
831 return checklink(self.base)
825
832
826 def _fixfilemode(self, name):
833 def _fixfilemode(self, name):
827 if self.createmode is None:
834 if self.createmode is None:
828 return
835 return
829 os.chmod(name, self.createmode & 0666)
836 os.chmod(name, self.createmode & 0666)
830
837
831 def __call__(self, path, mode="r", text=False, atomictemp=False):
838 def __call__(self, path, mode="r", text=False, atomictemp=False):
832 self.audit_path(path)
839 self.audit_path(path)
833 f = os.path.join(self.base, path)
840 f = os.path.join(self.base, path)
834
841
835 if not text and "b" not in mode:
842 if not text and "b" not in mode:
836 mode += "b" # for that other OS
843 mode += "b" # for that other OS
837
844
838 nlink = -1
845 nlink = -1
839 if mode not in ("r", "rb"):
846 if mode not in ("r", "rb"):
840 try:
847 try:
841 nlink = nlinks(f)
848 nlink = nlinks(f)
842 except OSError:
849 except OSError:
843 nlink = 0
850 nlink = 0
844 d = os.path.dirname(f)
851 d = os.path.dirname(f)
845 if not os.path.isdir(d):
852 if not os.path.isdir(d):
846 makedirs(d, self.createmode)
853 makedirs(d, self.createmode)
847 if atomictemp:
854 if atomictemp:
848 return atomictempfile(f, mode, self.createmode)
855 return atomictempfile(f, mode, self.createmode)
849 if nlink > 1:
856 if nlink > 1:
850 rename(mktempcopy(f), f)
857 rename(mktempcopy(f), f)
851 fp = posixfile(f, mode)
858 fp = posixfile(f, mode)
852 if nlink == 0:
859 if nlink == 0:
853 self._fixfilemode(f)
860 self._fixfilemode(f)
854 return fp
861 return fp
855
862
856 def symlink(self, src, dst):
863 def symlink(self, src, dst):
857 self.audit_path(dst)
864 self.audit_path(dst)
858 linkname = os.path.join(self.base, dst)
865 linkname = os.path.join(self.base, dst)
859 try:
866 try:
860 os.unlink(linkname)
867 os.unlink(linkname)
861 except OSError:
868 except OSError:
862 pass
869 pass
863
870
864 dirname = os.path.dirname(linkname)
871 dirname = os.path.dirname(linkname)
865 if not os.path.exists(dirname):
872 if not os.path.exists(dirname):
866 makedirs(dirname, self.createmode)
873 makedirs(dirname, self.createmode)
867
874
868 if self._can_symlink:
875 if self._can_symlink:
869 try:
876 try:
870 os.symlink(src, linkname)
877 os.symlink(src, linkname)
871 except OSError, err:
878 except OSError, err:
872 raise OSError(err.errno, _('could not symlink to %r: %s') %
879 raise OSError(err.errno, _('could not symlink to %r: %s') %
873 (src, err.strerror), linkname)
880 (src, err.strerror), linkname)
874 else:
881 else:
875 f = self(dst, "w")
882 f = self(dst, "w")
876 f.write(src)
883 f.write(src)
877 f.close()
884 f.close()
878 self._fixfilemode(dst)
885 self._fixfilemode(dst)
879
886
880 class chunkbuffer(object):
887 class chunkbuffer(object):
881 """Allow arbitrary sized chunks of data to be efficiently read from an
888 """Allow arbitrary sized chunks of data to be efficiently read from an
882 iterator over chunks of arbitrary size."""
889 iterator over chunks of arbitrary size."""
883
890
884 def __init__(self, in_iter):
891 def __init__(self, in_iter):
885 """in_iter is the iterator that's iterating over the input chunks.
892 """in_iter is the iterator that's iterating over the input chunks.
886 the buffer is grown toward an internal target size of 2**16 bytes."""
893 the buffer is grown toward an internal target size of 2**16 bytes."""
887 self.iter = iter(in_iter)
894 self.iter = iter(in_iter)
888 self.buf = ''
895 self.buf = ''
889 self.targetsize = 2**16
896 self.targetsize = 2**16
890
897
891 def read(self, l):
898 def read(self, l):
892 """Read L bytes of data from the iterator of chunks of data.
899 """Read L bytes of data from the iterator of chunks of data.
893 Returns less than L bytes if the iterator runs dry."""
900 Returns less than L bytes if the iterator runs dry."""
894 if l > len(self.buf) and self.iter:
901 if l > len(self.buf) and self.iter:
895 # read at least self.targetsize bytes (or l, whichever is larger)
902 # read at least self.targetsize bytes (or l, whichever is larger)
896 targetsize = max(l, self.targetsize)
903 targetsize = max(l, self.targetsize)
897 collector = cStringIO.StringIO()
904 collector = cStringIO.StringIO()
898 collector.write(self.buf)
905 collector.write(self.buf)
899 collected = len(self.buf)
906 collected = len(self.buf)
900 for chunk in self.iter:
907 for chunk in self.iter:
901 collector.write(chunk)
908 collector.write(chunk)
902 collected += len(chunk)
909 collected += len(chunk)
903 if collected >= targetsize:
910 if collected >= targetsize:
904 break
911 break
905 if collected < targetsize:
912 if collected < targetsize:
906 self.iter = False
913 self.iter = False
907 self.buf = collector.getvalue()
914 self.buf = collector.getvalue()
908 if len(self.buf) == l:
915 if len(self.buf) == l:
909 s, self.buf = str(self.buf), ''
916 s, self.buf = str(self.buf), ''
910 else:
917 else:
911 s, self.buf = self.buf[:l], buffer(self.buf, l)
918 s, self.buf = self.buf[:l], buffer(self.buf, l)
912 return s
919 return s
913
920
914 def filechunkiter(f, size=65536, limit=None):
921 def filechunkiter(f, size=65536, limit=None):
915 """Create a generator that produces the data in the file size
922 """Create a generator that produces the data in the file size
916 (default 65536) bytes at a time, up to optional limit (default is
923 (default 65536) bytes at a time, up to optional limit (default is
917 to read all data). Chunks may be less than size bytes if the
924 to read all data). Chunks may be less than size bytes if the
918 chunk is the last chunk in the file, or the file is a socket or
925 chunk is the last chunk in the file, or the file is a socket or
919 some other type of file that sometimes reads less data than is
926 some other type of file that sometimes reads less data than is
920 requested."""
927 requested."""
921 assert size >= 0
928 assert size >= 0
922 assert limit is None or limit >= 0
929 assert limit is None or limit >= 0
923 while True:
930 while True:
924 if limit is None:
931 if limit is None:
925 nbytes = size
932 nbytes = size
926 else:
933 else:
927 nbytes = min(limit, size)
934 nbytes = min(limit, size)
928 s = nbytes and f.read(nbytes)
935 s = nbytes and f.read(nbytes)
929 if not s:
936 if not s:
930 break
937 break
931 if limit:
938 if limit:
932 limit -= len(s)
939 limit -= len(s)
933 yield s
940 yield s
934
941
935 def makedate():
942 def makedate():
936 lt = time.localtime()
943 lt = time.localtime()
937 if lt[8] == 1 and time.daylight:
944 if lt[8] == 1 and time.daylight:
938 tz = time.altzone
945 tz = time.altzone
939 else:
946 else:
940 tz = time.timezone
947 tz = time.timezone
941 return time.mktime(lt), tz
948 return time.mktime(lt), tz
942
949
943 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
950 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
944 """represent a (unixtime, offset) tuple as a localized time.
951 """represent a (unixtime, offset) tuple as a localized time.
945 unixtime is seconds since the epoch, and offset is the time zone's
952 unixtime is seconds since the epoch, and offset is the time zone's
946 number of seconds away from UTC. if format contains neither %1 nor
953 number of seconds away from UTC. if format contains neither %1 nor
947 %2, no time zone is appended to the string."""
954 %2, no time zone is appended to the string."""
948 t, tz = date or makedate()
955 t, tz = date or makedate()
949 if "%1" in format or "%2" in format:
956 if "%1" in format or "%2" in format:
950 sign = (tz > 0) and "-" or "+"
957 sign = (tz > 0) and "-" or "+"
951 minutes = abs(tz) // 60
958 minutes = abs(tz) // 60
952 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
959 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
953 format = format.replace("%2", "%02d" % (minutes % 60))
960 format = format.replace("%2", "%02d" % (minutes % 60))
954 s = time.strftime(format, time.gmtime(float(t) - tz))
961 s = time.strftime(format, time.gmtime(float(t) - tz))
955 return s
962 return s
956
963
957 def shortdate(date=None):
964 def shortdate(date=None):
958 """turn (timestamp, tzoff) tuple into iso 8631 date."""
965 """turn (timestamp, tzoff) tuple into iso 8631 date."""
959 return datestr(date, format='%Y-%m-%d')
966 return datestr(date, format='%Y-%m-%d')
960
967
961 def strdate(string, format, defaults=[]):
968 def strdate(string, format, defaults=[]):
962 """parse a localized time string and return a (unixtime, offset) tuple.
969 """parse a localized time string and return a (unixtime, offset) tuple.
963 if the string cannot be parsed, ValueError is raised."""
970 if the string cannot be parsed, ValueError is raised."""
964 def timezone(string):
971 def timezone(string):
965 tz = string.split()[-1]
972 tz = string.split()[-1]
966 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
973 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
967 sign = (tz[0] == "+") and 1 or -1
974 sign = (tz[0] == "+") and 1 or -1
968 hours = int(tz[1:3])
975 hours = int(tz[1:3])
969 minutes = int(tz[3:5])
976 minutes = int(tz[3:5])
970 return -sign * (hours * 60 + minutes) * 60
977 return -sign * (hours * 60 + minutes) * 60
971 if tz == "GMT" or tz == "UTC":
978 if tz == "GMT" or tz == "UTC":
972 return 0
979 return 0
973 return None
980 return None
974
981
975 # NOTE: unixtime = localunixtime + offset
982 # NOTE: unixtime = localunixtime + offset
976 offset, date = timezone(string), string
983 offset, date = timezone(string), string
977 if offset != None:
984 if offset != None:
978 date = " ".join(string.split()[:-1])
985 date = " ".join(string.split()[:-1])
979
986
980 # add missing elements from defaults
987 # add missing elements from defaults
981 for part in defaults:
988 for part in defaults:
982 found = [True for p in part if ("%"+p) in format]
989 found = [True for p in part if ("%"+p) in format]
983 if not found:
990 if not found:
984 date += "@" + defaults[part]
991 date += "@" + defaults[part]
985 format += "@%" + part[0]
992 format += "@%" + part[0]
986
993
987 timetuple = time.strptime(date, format)
994 timetuple = time.strptime(date, format)
988 localunixtime = int(calendar.timegm(timetuple))
995 localunixtime = int(calendar.timegm(timetuple))
989 if offset is None:
996 if offset is None:
990 # local timezone
997 # local timezone
991 unixtime = int(time.mktime(timetuple))
998 unixtime = int(time.mktime(timetuple))
992 offset = unixtime - localunixtime
999 offset = unixtime - localunixtime
993 else:
1000 else:
994 unixtime = localunixtime + offset
1001 unixtime = localunixtime + offset
995 return unixtime, offset
1002 return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
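
# Illustrative usage sketch (not part of the original source), assuming the
# module is importable as mercurial.util; a '+0200' suffix yields a negative
# offset because unixtime = localunixtime + offset:
#
#   >>> from mercurial import util
#   >>> util.parsedate('2006-02-01 13:00:30 +0200')
#   (1138791630, -7200)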

def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
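
# Illustrative usage sketch (not part of the original source), assuming the
# module is importable as mercurial.util; the returned function is applied
# to plain unix timestamps:
#
#   >>> from mercurial import util
#   >>> m = util.matchdate('>2006-02-01')
#   >>> m(util.parsedate('2006-05-01')[0])
#   True
#   >>> m(util.parsedate('2005-05-01')[0])
#   False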

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]
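
# Illustrative usage sketch (not part of the original source) for the two
# helpers above, assuming the module is importable as mercurial.util:
#
#   >>> from mercurial import util
#   >>> util.shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> util.email('John Doe <john.doe@example.com>')
#   'john.doe@example.com'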

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength - 3])
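
# Illustrative usage sketch (not part of the original source): note that the
# three trailing dots are counted against maxlength.
#
#   >>> from mercurial import util
#   >>> util.ellipsis('abcdefghij', 8)
#   'abcde...'
#   >>> util.ellipsis('short', 8)
#   'short'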

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
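
# Illustrative usage sketch (not part of the original source); '/srv/hg' is a
# hypothetical directory tree containing Mercurial repositories:
#
#   >>> from mercurial import util
#   >>> for repo in util.walkrepos('/srv/hg', followsym=True):
#   ...     print repo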

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath
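
# Illustrative usage sketch (not part of the original source); the paths are
# hypothetical. Directory entries in HGRCPATH are expanded to their *.rc
# files, plain entries are kept as-is, and the result is cached in _rcpath:
#
#   >>> import os
#   >>> from mercurial import util
#   >>> os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/hgrc.d',
#   ...                                           '/home/user/.hgrc'])
#   >>> util.rcpath()   # the *.rc files under hgrc.d, then '/home/user/.hgrc'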

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
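
# Illustrative usage sketch (not part of the original source); each value is
# rendered with the largest unit whose threshold it meets:
#
#   >>> from mercurial import util
#   >>> util.bytecount(756)
#   '756 bytes'
#   >>> util.bytecount(100 * 1024)
#   '100 KB'
#   >>> util.bytecount(2 * 1024 * 1024)
#   '2.00 MB'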

def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            if scheme == 'file':
                i = path.find('/', 2)
                if i == -1:
                    return ''
                # On Windows, absolute paths are rooted at the current drive
                # root. On POSIX they are rooted at the file system root.
                if os.name == 'nt':
                    droot = os.path.splitdrive(os.getcwd())[0] + '/'
                    path = os.path.join(droot, path[i + 1:])
                else:
                    path = path[i:]
            else:
                path = path[2:]
    return path
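
# Illustrative usage sketch (not part of the original source); the second
# result assumes a POSIX host, where file:// paths stay rooted at /:
#
#   >>> from mercurial import util
#   >>> util.drop_scheme('ssh', 'ssh://example.com/repo')
#   'example.com/repo'
#   >>> util.drop_scheme('file', 'file:///tmp/repo')
#   '/tmp/repo'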

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                try:
                    fd = dev.fileno()
                except AttributeError:
                    continue
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
            except IOError, e:
                if e[0] == errno.EINVAL:
                    pass
                else:
                    raise
    except ImportError:
        pass
    return 80

def wrap(line, hangindent, width=None):
    if width is None:
        width = termwidth() - 2
    if width <= hangindent:
        # adjust for weird terminal size
        width = max(78, hangindent + 1)
    padding = '\n' + ' ' * hangindent
    # To avoid corrupting multi-byte characters in line, we must wrap
    # a Unicode string instead of a bytestring.
    try:
        u = line.decode(encoding.encoding)
        w = padding.join(textwrap.wrap(u, width=width - hangindent))
        return w.encode(encoding.encoding)
    except UnicodeDecodeError:
        return padding.join(textwrap.wrap(line, width=width - hangindent))
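
# Illustrative usage sketch (not part of the original source): continuation
# lines are indented by hangindent spaces and wrapping happens at
# width - hangindent columns:
#
#   >>> from mercurial import util
#   >>> util.wrap('a b c d e f', hangindent=2, width=6)
#   'a b\n  c d\n  e f'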

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True