opener: use %r in format string for abort
Adrian Buehlmann
r13943:545091b1 default
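The change below is a one-liner in opener.__call__ (file line 920): the abort message now formats the offending path with %r instead of %s. The sketch that follows illustrates the effect with a hypothetical path and the "filename ends with ..." message that checkwinfilename returns; neither value is taken from this changeset. repr-style formatting quotes the path and escapes control characters, so problems such as a trailing space remain visible in the error output.

# Illustrative sketch only: why the abort switches from %s to %r.
# 'r' and 'path' are example values, not data from the changeset.
r = "filename ends with ' ', which is not allowed on Windows"
path = "foo/bar/bla "              # the trailing space is the problem

print "%s: %s" % (r, path)         # trailing space is invisible in the output
print "%s: %r" % (r, path)         # prints ...: 'foo/bar/bla ', so it is visible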
@@ -1,1580 +1,1580 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 fp = open(outname, 'rb')
201 fp = open(outname, 'rb')
202 r = fp.read()
202 r = fp.read()
203 fp.close()
203 fp.close()
204 return r
204 return r
205 finally:
205 finally:
206 try:
206 try:
207 if inname:
207 if inname:
208 os.unlink(inname)
208 os.unlink(inname)
209 except:
209 except:
210 pass
210 pass
211 try:
211 try:
212 if outname:
212 if outname:
213 os.unlink(outname)
213 os.unlink(outname)
214 except:
214 except:
215 pass
215 pass
216
216
217 filtertable = {
217 filtertable = {
218 'tempfile:': tempfilter,
218 'tempfile:': tempfilter,
219 'pipe:': pipefilter,
219 'pipe:': pipefilter,
220 }
220 }
221
221
222 def filter(s, cmd):
222 def filter(s, cmd):
223 "filter a string through a command that transforms its input to its output"
223 "filter a string through a command that transforms its input to its output"
224 for name, fn in filtertable.iteritems():
224 for name, fn in filtertable.iteritems():
225 if cmd.startswith(name):
225 if cmd.startswith(name):
226 return fn(s, cmd[len(name):].lstrip())
226 return fn(s, cmd[len(name):].lstrip())
227 return pipefilter(s, cmd)
227 return pipefilter(s, cmd)
228
228
229 def binary(s):
229 def binary(s):
230 """return true if a string is binary data"""
230 """return true if a string is binary data"""
231 return bool(s and '\0' in s)
231 return bool(s and '\0' in s)
232
232
233 def increasingchunks(source, min=1024, max=65536):
233 def increasingchunks(source, min=1024, max=65536):
234 '''return no less than min bytes per chunk while data remains,
234 '''return no less than min bytes per chunk while data remains,
235 doubling min after each chunk until it reaches max'''
235 doubling min after each chunk until it reaches max'''
236 def log2(x):
236 def log2(x):
237 if not x:
237 if not x:
238 return 0
238 return 0
239 i = 0
239 i = 0
240 while x:
240 while x:
241 x >>= 1
241 x >>= 1
242 i += 1
242 i += 1
243 return i - 1
243 return i - 1
244
244
245 buf = []
245 buf = []
246 blen = 0
246 blen = 0
247 for chunk in source:
247 for chunk in source:
248 buf.append(chunk)
248 buf.append(chunk)
249 blen += len(chunk)
249 blen += len(chunk)
250 if blen >= min:
250 if blen >= min:
251 if min < max:
251 if min < max:
252 min = min << 1
252 min = min << 1
253 nmin = 1 << log2(blen)
253 nmin = 1 << log2(blen)
254 if nmin > min:
254 if nmin > min:
255 min = nmin
255 min = nmin
256 if min > max:
256 if min > max:
257 min = max
257 min = max
258 yield ''.join(buf)
258 yield ''.join(buf)
259 blen = 0
259 blen = 0
260 buf = []
260 buf = []
261 if buf:
261 if buf:
262 yield ''.join(buf)
262 yield ''.join(buf)
263
263
264 Abort = error.Abort
264 Abort = error.Abort
265
265
266 def always(fn):
266 def always(fn):
267 return True
267 return True
268
268
269 def never(fn):
269 def never(fn):
270 return False
270 return False
271
271
272 def pathto(root, n1, n2):
272 def pathto(root, n1, n2):
273 '''return the relative path from one place to another.
273 '''return the relative path from one place to another.
274 root should use os.sep to separate directories
274 root should use os.sep to separate directories
275 n1 should use os.sep to separate directories
275 n1 should use os.sep to separate directories
276 n2 should use "/" to separate directories
276 n2 should use "/" to separate directories
277 returns an os.sep-separated path.
277 returns an os.sep-separated path.
278
278
279 If n1 is a relative path, it's assumed it's
279 If n1 is a relative path, it's assumed it's
280 relative to root.
280 relative to root.
281 n2 should always be relative to root.
281 n2 should always be relative to root.
282 '''
282 '''
283 if not n1:
283 if not n1:
284 return localpath(n2)
284 return localpath(n2)
285 if os.path.isabs(n1):
285 if os.path.isabs(n1):
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
287 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
288 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
289 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
290 a.reverse()
290 a.reverse()
291 b.reverse()
291 b.reverse()
292 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
293 a.pop()
293 a.pop()
294 b.pop()
294 b.pop()
295 b.reverse()
295 b.reverse()
296 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
297
297
298 def canonpath(root, cwd, myname, auditor=None):
298 def canonpath(root, cwd, myname, auditor=None):
299 """return the canonical path of myname, given cwd and root"""
299 """return the canonical path of myname, given cwd and root"""
300 if endswithsep(root):
300 if endswithsep(root):
301 rootsep = root
301 rootsep = root
302 else:
302 else:
303 rootsep = root + os.sep
303 rootsep = root + os.sep
304 name = myname
304 name = myname
305 if not os.path.isabs(name):
305 if not os.path.isabs(name):
306 name = os.path.join(root, cwd, name)
306 name = os.path.join(root, cwd, name)
307 name = os.path.normpath(name)
307 name = os.path.normpath(name)
308 if auditor is None:
308 if auditor is None:
309 auditor = path_auditor(root)
309 auditor = path_auditor(root)
310 if name != rootsep and name.startswith(rootsep):
310 if name != rootsep and name.startswith(rootsep):
311 name = name[len(rootsep):]
311 name = name[len(rootsep):]
312 auditor(name)
312 auditor(name)
313 return pconvert(name)
313 return pconvert(name)
314 elif name == root:
314 elif name == root:
315 return ''
315 return ''
316 else:
316 else:
317 # Determine whether `name' is in the hierarchy at or beneath `root',
317 # Determine whether `name' is in the hierarchy at or beneath `root',
318 # by iterating name=dirname(name) until that causes no change (can't
318 # by iterating name=dirname(name) until that causes no change (can't
319 # check name == '/', because that doesn't work on windows). For each
319 # check name == '/', because that doesn't work on windows). For each
320 # `name', compare dev/inode numbers. If they match, the list `rel'
320 # `name', compare dev/inode numbers. If they match, the list `rel'
321 # holds the reversed list of components making up the relative file
321 # holds the reversed list of components making up the relative file
322 # name we want.
322 # name we want.
323 root_st = os.stat(root)
323 root_st = os.stat(root)
324 rel = []
324 rel = []
325 while True:
325 while True:
326 try:
326 try:
327 name_st = os.stat(name)
327 name_st = os.stat(name)
328 except OSError:
328 except OSError:
329 break
329 break
330 if samestat(name_st, root_st):
330 if samestat(name_st, root_st):
331 if not rel:
331 if not rel:
332 # name was actually the same as root (maybe a symlink)
332 # name was actually the same as root (maybe a symlink)
333 return ''
333 return ''
334 rel.reverse()
334 rel.reverse()
335 name = os.path.join(*rel)
335 name = os.path.join(*rel)
336 auditor(name)
336 auditor(name)
337 return pconvert(name)
337 return pconvert(name)
338 dirname, basename = os.path.split(name)
338 dirname, basename = os.path.split(name)
339 rel.append(basename)
339 rel.append(basename)
340 if dirname == name:
340 if dirname == name:
341 break
341 break
342 name = dirname
342 name = dirname
343
343
344 raise Abort('%s not under root' % myname)
344 raise Abort('%s not under root' % myname)
345
345
346 _hgexecutable = None
346 _hgexecutable = None
347
347
348 def main_is_frozen():
348 def main_is_frozen():
349 """return True if we are a frozen executable.
349 """return True if we are a frozen executable.
350
350
351 The code supports py2exe (most common, Windows only) and tools/freeze
351 The code supports py2exe (most common, Windows only) and tools/freeze
352 (portable, not much used).
352 (portable, not much used).
353 """
353 """
354 return (hasattr(sys, "frozen") or # new py2exe
354 return (hasattr(sys, "frozen") or # new py2exe
355 hasattr(sys, "importers") or # old py2exe
355 hasattr(sys, "importers") or # old py2exe
356 imp.is_frozen("__main__")) # tools/freeze
356 imp.is_frozen("__main__")) # tools/freeze
357
357
358 def hgexecutable():
358 def hgexecutable():
359 """return location of the 'hg' executable.
359 """return location of the 'hg' executable.
360
360
361 Defaults to $HG or 'hg' in the search path.
361 Defaults to $HG or 'hg' in the search path.
362 """
362 """
363 if _hgexecutable is None:
363 if _hgexecutable is None:
364 hg = os.environ.get('HG')
364 hg = os.environ.get('HG')
365 if hg:
365 if hg:
366 set_hgexecutable(hg)
366 set_hgexecutable(hg)
367 elif main_is_frozen():
367 elif main_is_frozen():
368 set_hgexecutable(sys.executable)
368 set_hgexecutable(sys.executable)
369 else:
369 else:
370 exe = find_exe('hg') or os.path.basename(sys.argv[0])
370 exe = find_exe('hg') or os.path.basename(sys.argv[0])
371 set_hgexecutable(exe)
371 set_hgexecutable(exe)
372 return _hgexecutable
372 return _hgexecutable
373
373
374 def set_hgexecutable(path):
374 def set_hgexecutable(path):
375 """set location of the 'hg' executable"""
375 """set location of the 'hg' executable"""
376 global _hgexecutable
376 global _hgexecutable
377 _hgexecutable = path
377 _hgexecutable = path
378
378
379 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
379 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
380 '''enhanced shell command execution.
380 '''enhanced shell command execution.
381 run with environment maybe modified, maybe in different dir.
381 run with environment maybe modified, maybe in different dir.
382
382
383 if command fails and onerr is None, return status. if ui object,
383 if command fails and onerr is None, return status. if ui object,
384 print error message and return status, else raise onerr object as
384 print error message and return status, else raise onerr object as
385 exception.
385 exception.
386
386
387 if out is specified, it is assumed to be a file-like object that has a
387 if out is specified, it is assumed to be a file-like object that has a
388 write() method. stdout and stderr will be redirected to out.'''
388 write() method. stdout and stderr will be redirected to out.'''
389 try:
389 try:
390 sys.stdout.flush()
390 sys.stdout.flush()
391 except Exception:
391 except Exception:
392 pass
392 pass
393 def py2shell(val):
393 def py2shell(val):
394 'convert python object into string that is useful to shell'
394 'convert python object into string that is useful to shell'
395 if val is None or val is False:
395 if val is None or val is False:
396 return '0'
396 return '0'
397 if val is True:
397 if val is True:
398 return '1'
398 return '1'
399 return str(val)
399 return str(val)
400 origcmd = cmd
400 origcmd = cmd
401 cmd = quotecommand(cmd)
401 cmd = quotecommand(cmd)
402 env = dict(os.environ)
402 env = dict(os.environ)
403 env.update((k, py2shell(v)) for k, v in environ.iteritems())
403 env.update((k, py2shell(v)) for k, v in environ.iteritems())
404 env['HG'] = hgexecutable()
404 env['HG'] = hgexecutable()
405 if out is None:
405 if out is None:
406 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
406 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
407 env=env, cwd=cwd)
407 env=env, cwd=cwd)
408 else:
408 else:
409 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
409 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
410 env=env, cwd=cwd, stdout=subprocess.PIPE,
410 env=env, cwd=cwd, stdout=subprocess.PIPE,
411 stderr=subprocess.STDOUT)
411 stderr=subprocess.STDOUT)
412 for line in proc.stdout:
412 for line in proc.stdout:
413 out.write(line)
413 out.write(line)
414 proc.wait()
414 proc.wait()
415 rc = proc.returncode
415 rc = proc.returncode
416 if sys.platform == 'OpenVMS' and rc & 1:
416 if sys.platform == 'OpenVMS' and rc & 1:
417 rc = 0
417 rc = 0
418 if rc and onerr:
418 if rc and onerr:
419 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
419 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
420 explain_exit(rc)[0])
420 explain_exit(rc)[0])
421 if errprefix:
421 if errprefix:
422 errmsg = '%s: %s' % (errprefix, errmsg)
422 errmsg = '%s: %s' % (errprefix, errmsg)
423 try:
423 try:
424 onerr.warn(errmsg + '\n')
424 onerr.warn(errmsg + '\n')
425 except AttributeError:
425 except AttributeError:
426 raise onerr(errmsg)
426 raise onerr(errmsg)
427 return rc
427 return rc
428
428
429 def checksignature(func):
429 def checksignature(func):
430 '''wrap a function with code to check for calling errors'''
430 '''wrap a function with code to check for calling errors'''
431 def check(*args, **kwargs):
431 def check(*args, **kwargs):
432 try:
432 try:
433 return func(*args, **kwargs)
433 return func(*args, **kwargs)
434 except TypeError:
434 except TypeError:
435 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
435 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
436 raise error.SignatureError
436 raise error.SignatureError
437 raise
437 raise
438
438
439 return check
439 return check
440
440
441 def makedir(path, notindexed):
441 def makedir(path, notindexed):
442 os.mkdir(path)
442 os.mkdir(path)
443
443
444 def unlinkpath(f):
444 def unlinkpath(f):
445 """unlink and remove the directory if it is empty"""
445 """unlink and remove the directory if it is empty"""
446 os.unlink(f)
446 os.unlink(f)
447 # try removing directories that might now be empty
447 # try removing directories that might now be empty
448 try:
448 try:
449 os.removedirs(os.path.dirname(f))
449 os.removedirs(os.path.dirname(f))
450 except OSError:
450 except OSError:
451 pass
451 pass
452
452
453 def copyfile(src, dest):
453 def copyfile(src, dest):
454 "copy a file, preserving mode and atime/mtime"
454 "copy a file, preserving mode and atime/mtime"
455 if os.path.islink(src):
455 if os.path.islink(src):
456 try:
456 try:
457 os.unlink(dest)
457 os.unlink(dest)
458 except:
458 except:
459 pass
459 pass
460 os.symlink(os.readlink(src), dest)
460 os.symlink(os.readlink(src), dest)
461 else:
461 else:
462 try:
462 try:
463 shutil.copyfile(src, dest)
463 shutil.copyfile(src, dest)
464 shutil.copymode(src, dest)
464 shutil.copymode(src, dest)
465 except shutil.Error, inst:
465 except shutil.Error, inst:
466 raise Abort(str(inst))
466 raise Abort(str(inst))
467
467
468 def copyfiles(src, dst, hardlink=None):
468 def copyfiles(src, dst, hardlink=None):
469 """Copy a directory tree using hardlinks if possible"""
469 """Copy a directory tree using hardlinks if possible"""
470
470
471 if hardlink is None:
471 if hardlink is None:
472 hardlink = (os.stat(src).st_dev ==
472 hardlink = (os.stat(src).st_dev ==
473 os.stat(os.path.dirname(dst)).st_dev)
473 os.stat(os.path.dirname(dst)).st_dev)
474
474
475 num = 0
475 num = 0
476 if os.path.isdir(src):
476 if os.path.isdir(src):
477 os.mkdir(dst)
477 os.mkdir(dst)
478 for name, kind in osutil.listdir(src):
478 for name, kind in osutil.listdir(src):
479 srcname = os.path.join(src, name)
479 srcname = os.path.join(src, name)
480 dstname = os.path.join(dst, name)
480 dstname = os.path.join(dst, name)
481 hardlink, n = copyfiles(srcname, dstname, hardlink)
481 hardlink, n = copyfiles(srcname, dstname, hardlink)
482 num += n
482 num += n
483 else:
483 else:
484 if hardlink:
484 if hardlink:
485 try:
485 try:
486 os_link(src, dst)
486 os_link(src, dst)
487 except (IOError, OSError):
487 except (IOError, OSError):
488 hardlink = False
488 hardlink = False
489 shutil.copy(src, dst)
489 shutil.copy(src, dst)
490 else:
490 else:
491 shutil.copy(src, dst)
491 shutil.copy(src, dst)
492 num += 1
492 num += 1
493
493
494 return hardlink, num
494 return hardlink, num
495
495
496 _windows_reserved_filenames = '''con prn aux nul
496 _windows_reserved_filenames = '''con prn aux nul
497 com1 com2 com3 com4 com5 com6 com7 com8 com9
497 com1 com2 com3 com4 com5 com6 com7 com8 com9
498 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
498 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
499 _windows_reserved_chars = ':*?"<>|'
499 _windows_reserved_chars = ':*?"<>|'
500 def checkwinfilename(path):
500 def checkwinfilename(path):
501 '''Check that the base-relative path is a valid filename on Windows.
501 '''Check that the base-relative path is a valid filename on Windows.
502 Returns None if the path is ok, or a UI string describing the problem.
502 Returns None if the path is ok, or a UI string describing the problem.
503
503
504 >>> checkwinfilename("just/a/normal/path")
504 >>> checkwinfilename("just/a/normal/path")
505 >>> checkwinfilename("foo/bar/con.xml")
505 >>> checkwinfilename("foo/bar/con.xml")
506 "filename contains 'con', which is reserved on Windows"
506 "filename contains 'con', which is reserved on Windows"
507 >>> checkwinfilename("foo/con.xml/bar")
507 >>> checkwinfilename("foo/con.xml/bar")
508 "filename contains 'con', which is reserved on Windows"
508 "filename contains 'con', which is reserved on Windows"
509 >>> checkwinfilename("foo/bar/xml.con")
509 >>> checkwinfilename("foo/bar/xml.con")
510 >>> checkwinfilename("foo/bar/AUX/bla.txt")
510 >>> checkwinfilename("foo/bar/AUX/bla.txt")
511 "filename contains 'AUX', which is reserved on Windows"
511 "filename contains 'AUX', which is reserved on Windows"
512 >>> checkwinfilename("foo/bar/bla:.txt")
512 >>> checkwinfilename("foo/bar/bla:.txt")
513 "filename contains ':', which is reserved on Windows"
513 "filename contains ':', which is reserved on Windows"
514 >>> checkwinfilename("foo/bar/b\07la.txt")
514 >>> checkwinfilename("foo/bar/b\07la.txt")
515 "filename contains '\\x07', which is invalid on Windows"
515 "filename contains '\\x07', which is invalid on Windows"
516 >>> checkwinfilename("foo/bar/bla ")
516 >>> checkwinfilename("foo/bar/bla ")
517 "filename ends with ' ', which is not allowed on Windows"
517 "filename ends with ' ', which is not allowed on Windows"
518 '''
518 '''
519 for n in path.replace('\\', '/').split('/'):
519 for n in path.replace('\\', '/').split('/'):
520 if not n:
520 if not n:
521 continue
521 continue
522 for c in n:
522 for c in n:
523 if c in _windows_reserved_chars:
523 if c in _windows_reserved_chars:
524 return _("filename contains '%s', which is reserved "
524 return _("filename contains '%s', which is reserved "
525 "on Windows") % c
525 "on Windows") % c
526 if ord(c) <= 31:
526 if ord(c) <= 31:
527 return _("filename contains '%s', which is invalid "
527 return _("filename contains '%s', which is invalid "
528 "on Windows") % c
528 "on Windows") % c
529 base = n.split('.')[0]
529 base = n.split('.')[0]
530 if base and base.lower() in _windows_reserved_filenames:
530 if base and base.lower() in _windows_reserved_filenames:
531 return _("filename contains '%s', which is reserved "
531 return _("filename contains '%s', which is reserved "
532 "on Windows") % base
532 "on Windows") % base
533 t = n[-1]
533 t = n[-1]
534 if t in '. ':
534 if t in '. ':
535 return _("filename ends with '%s', which is not allowed "
535 return _("filename ends with '%s', which is not allowed "
536 "on Windows") % t
536 "on Windows") % t
537
537
538 class path_auditor(object):
538 class path_auditor(object):
539 '''ensure that a filesystem path contains no banned components.
539 '''ensure that a filesystem path contains no banned components.
540 the following properties of a path are checked:
540 the following properties of a path are checked:
541
541
542 - ends with a directory separator
542 - ends with a directory separator
543 - under top-level .hg
543 - under top-level .hg
544 - starts at the root of a windows drive
544 - starts at the root of a windows drive
545 - contains ".."
545 - contains ".."
546 - traverses a symlink (e.g. a/symlink_here/b)
546 - traverses a symlink (e.g. a/symlink_here/b)
547 - inside a nested repository (a callback can be used to approve
547 - inside a nested repository (a callback can be used to approve
548 some nested repositories, e.g., subrepositories)
548 some nested repositories, e.g., subrepositories)
549 '''
549 '''
550
550
551 def __init__(self, root, callback=None):
551 def __init__(self, root, callback=None):
552 self.audited = set()
552 self.audited = set()
553 self.auditeddir = set()
553 self.auditeddir = set()
554 self.root = root
554 self.root = root
555 self.callback = callback
555 self.callback = callback
556
556
557 def __call__(self, path):
557 def __call__(self, path):
558 '''Check the relative path.
558 '''Check the relative path.
559 path may contain a pattern (e.g. foodir/**.txt)'''
559 path may contain a pattern (e.g. foodir/**.txt)'''
560
560
561 if path in self.audited:
561 if path in self.audited:
562 return
562 return
563 # AIX ignores "/" at end of path, others raise EISDIR.
563 # AIX ignores "/" at end of path, others raise EISDIR.
564 if endswithsep(path):
564 if endswithsep(path):
565 raise Abort(_("path ends in directory separator: %s") % path)
565 raise Abort(_("path ends in directory separator: %s") % path)
566 normpath = os.path.normcase(path)
566 normpath = os.path.normcase(path)
567 parts = splitpath(normpath)
567 parts = splitpath(normpath)
568 if (os.path.splitdrive(path)[0]
568 if (os.path.splitdrive(path)[0]
569 or parts[0].lower() in ('.hg', '.hg.', '')
569 or parts[0].lower() in ('.hg', '.hg.', '')
570 or os.pardir in parts):
570 or os.pardir in parts):
571 raise Abort(_("path contains illegal component: %s") % path)
571 raise Abort(_("path contains illegal component: %s") % path)
572 if '.hg' in path.lower():
572 if '.hg' in path.lower():
573 lparts = [p.lower() for p in parts]
573 lparts = [p.lower() for p in parts]
574 for p in '.hg', '.hg.':
574 for p in '.hg', '.hg.':
575 if p in lparts[1:]:
575 if p in lparts[1:]:
576 pos = lparts.index(p)
576 pos = lparts.index(p)
577 base = os.path.join(*parts[:pos])
577 base = os.path.join(*parts[:pos])
578 raise Abort(_('path %r is inside nested repo %r')
578 raise Abort(_('path %r is inside nested repo %r')
579 % (path, base))
579 % (path, base))
580
580
581 parts.pop()
581 parts.pop()
582 prefixes = []
582 prefixes = []
583 while parts:
583 while parts:
584 prefix = os.sep.join(parts)
584 prefix = os.sep.join(parts)
585 if prefix in self.auditeddir:
585 if prefix in self.auditeddir:
586 break
586 break
587 curpath = os.path.join(self.root, prefix)
587 curpath = os.path.join(self.root, prefix)
588 try:
588 try:
589 st = os.lstat(curpath)
589 st = os.lstat(curpath)
590 except OSError, err:
590 except OSError, err:
591 # EINVAL can be raised as invalid path syntax under win32.
591 # EINVAL can be raised as invalid path syntax under win32.
592 # They must be ignored so that patterns can be checked too.
592 # They must be ignored so that patterns can be checked too.
593 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
593 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
594 raise
594 raise
595 else:
595 else:
596 if stat.S_ISLNK(st.st_mode):
596 if stat.S_ISLNK(st.st_mode):
597 raise Abort(_('path %r traverses symbolic link %r') %
597 raise Abort(_('path %r traverses symbolic link %r') %
598 (path, prefix))
598 (path, prefix))
599 elif (stat.S_ISDIR(st.st_mode) and
599 elif (stat.S_ISDIR(st.st_mode) and
600 os.path.isdir(os.path.join(curpath, '.hg'))):
600 os.path.isdir(os.path.join(curpath, '.hg'))):
601 if not self.callback or not self.callback(curpath):
601 if not self.callback or not self.callback(curpath):
602 raise Abort(_('path %r is inside nested repo %r') %
602 raise Abort(_('path %r is inside nested repo %r') %
603 (path, prefix))
603 (path, prefix))
604 prefixes.append(prefix)
604 prefixes.append(prefix)
605 parts.pop()
605 parts.pop()
606
606
607 self.audited.add(path)
607 self.audited.add(path)
608 # only add prefixes to the cache after checking everything: we don't
608 # only add prefixes to the cache after checking everything: we don't
609 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
609 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
610 self.auditeddir.update(prefixes)
610 self.auditeddir.update(prefixes)
611
611
612 def lookup_reg(key, name=None, scope=None):
612 def lookup_reg(key, name=None, scope=None):
613 return None
613 return None
614
614
615 def hidewindow():
615 def hidewindow():
616 """Hide current shell window.
616 """Hide current shell window.
617
617
618 Used to hide the window opened when starting asynchronous
618 Used to hide the window opened when starting asynchronous
619 child process under Windows, unneeded on other systems.
619 child process under Windows, unneeded on other systems.
620 """
620 """
621 pass
621 pass
622
622
623 if os.name == 'nt':
623 if os.name == 'nt':
624 checkosfilename = checkwinfilename
624 checkosfilename = checkwinfilename
625 from windows import *
625 from windows import *
626 else:
626 else:
627 from posix import *
627 from posix import *
628
628
629 def makelock(info, pathname):
629 def makelock(info, pathname):
630 try:
630 try:
631 return os.symlink(info, pathname)
631 return os.symlink(info, pathname)
632 except OSError, why:
632 except OSError, why:
633 if why.errno == errno.EEXIST:
633 if why.errno == errno.EEXIST:
634 raise
634 raise
635 except AttributeError: # no symlink in os
635 except AttributeError: # no symlink in os
636 pass
636 pass
637
637
638 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
638 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
639 os.write(ld, info)
639 os.write(ld, info)
640 os.close(ld)
640 os.close(ld)
641
641
642 def readlock(pathname):
642 def readlock(pathname):
643 try:
643 try:
644 return os.readlink(pathname)
644 return os.readlink(pathname)
645 except OSError, why:
645 except OSError, why:
646 if why.errno not in (errno.EINVAL, errno.ENOSYS):
646 if why.errno not in (errno.EINVAL, errno.ENOSYS):
647 raise
647 raise
648 except AttributeError: # no symlink in os
648 except AttributeError: # no symlink in os
649 pass
649 pass
650 fp = posixfile(pathname)
650 fp = posixfile(pathname)
651 r = fp.read()
651 r = fp.read()
652 fp.close()
652 fp.close()
653 return r
653 return r
654
654
655 def fstat(fp):
655 def fstat(fp):
656 '''stat file object that may not have fileno method.'''
656 '''stat file object that may not have fileno method.'''
657 try:
657 try:
658 return os.fstat(fp.fileno())
658 return os.fstat(fp.fileno())
659 except AttributeError:
659 except AttributeError:
660 return os.stat(fp.name)
660 return os.stat(fp.name)
661
661
662 # File system features
662 # File system features
663
663
664 def checkcase(path):
664 def checkcase(path):
665 """
665 """
666 Check whether the given path is on a case-sensitive filesystem
666 Check whether the given path is on a case-sensitive filesystem
667
667
668 Requires a path (like /foo/.hg) ending with a foldable final
668 Requires a path (like /foo/.hg) ending with a foldable final
669 directory component.
669 directory component.
670 """
670 """
671 s1 = os.stat(path)
671 s1 = os.stat(path)
672 d, b = os.path.split(path)
672 d, b = os.path.split(path)
673 p2 = os.path.join(d, b.upper())
673 p2 = os.path.join(d, b.upper())
674 if path == p2:
674 if path == p2:
675 p2 = os.path.join(d, b.lower())
675 p2 = os.path.join(d, b.lower())
676 try:
676 try:
677 s2 = os.stat(p2)
677 s2 = os.stat(p2)
678 if s2 == s1:
678 if s2 == s1:
679 return False
679 return False
680 return True
680 return True
681 except:
681 except:
682 return True
682 return True
683
683
684 _fspathcache = {}
684 _fspathcache = {}
685 def fspath(name, root):
685 def fspath(name, root):
686 '''Get name in the case stored in the filesystem
686 '''Get name in the case stored in the filesystem
687
687
688 The name is either relative to root, or it is an absolute path starting
688 The name is either relative to root, or it is an absolute path starting
689 with root. Note that this function is unnecessary, and should not be
689 with root. Note that this function is unnecessary, and should not be
690 called, for case-sensitive filesystems (simply because it's expensive).
690 called, for case-sensitive filesystems (simply because it's expensive).
691 '''
691 '''
692 # If name is absolute, make it relative
692 # If name is absolute, make it relative
693 if name.lower().startswith(root.lower()):
693 if name.lower().startswith(root.lower()):
694 l = len(root)
694 l = len(root)
695 if name[l] == os.sep or name[l] == os.altsep:
695 if name[l] == os.sep or name[l] == os.altsep:
696 l = l + 1
696 l = l + 1
697 name = name[l:]
697 name = name[l:]
698
698
699 if not os.path.lexists(os.path.join(root, name)):
699 if not os.path.lexists(os.path.join(root, name)):
700 return None
700 return None
701
701
702 seps = os.sep
702 seps = os.sep
703 if os.altsep:
703 if os.altsep:
704 seps = seps + os.altsep
704 seps = seps + os.altsep
705 # Protect backslashes. This gets silly very quickly.
705 # Protect backslashes. This gets silly very quickly.
706 seps.replace('\\','\\\\')
706 seps.replace('\\','\\\\')
707 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
707 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
708 dir = os.path.normcase(os.path.normpath(root))
708 dir = os.path.normcase(os.path.normpath(root))
709 result = []
709 result = []
710 for part, sep in pattern.findall(name):
710 for part, sep in pattern.findall(name):
711 if sep:
711 if sep:
712 result.append(sep)
712 result.append(sep)
713 continue
713 continue
714
714
715 if dir not in _fspathcache:
715 if dir not in _fspathcache:
716 _fspathcache[dir] = os.listdir(dir)
716 _fspathcache[dir] = os.listdir(dir)
717 contents = _fspathcache[dir]
717 contents = _fspathcache[dir]
718
718
719 lpart = part.lower()
719 lpart = part.lower()
720 lenp = len(part)
720 lenp = len(part)
721 for n in contents:
721 for n in contents:
722 if lenp == len(n) and n.lower() == lpart:
722 if lenp == len(n) and n.lower() == lpart:
723 result.append(n)
723 result.append(n)
724 break
724 break
725 else:
725 else:
726 # Cannot happen, as the file exists!
726 # Cannot happen, as the file exists!
727 result.append(part)
727 result.append(part)
728 dir = os.path.join(dir, lpart)
728 dir = os.path.join(dir, lpart)
729
729
730 return ''.join(result)
730 return ''.join(result)
731
731
732 def checknlink(testfile):
732 def checknlink(testfile):
733 '''check whether hardlink count reporting works properly'''
733 '''check whether hardlink count reporting works properly'''
734
734
735 # testfile may be open, so we need a separate file for checking to
735 # testfile may be open, so we need a separate file for checking to
736 # work around issue2543 (or testfile may get lost on Samba shares)
736 # work around issue2543 (or testfile may get lost on Samba shares)
737 f1 = testfile + ".hgtmp1"
737 f1 = testfile + ".hgtmp1"
738 if os.path.lexists(f1):
738 if os.path.lexists(f1):
739 return False
739 return False
740 try:
740 try:
741 posixfile(f1, 'w').close()
741 posixfile(f1, 'w').close()
742 except IOError:
742 except IOError:
743 return False
743 return False
744
744
745 f2 = testfile + ".hgtmp2"
745 f2 = testfile + ".hgtmp2"
746 fd = None
746 fd = None
747 try:
747 try:
748 try:
748 try:
749 os_link(f1, f2)
749 os_link(f1, f2)
750 except OSError:
750 except OSError:
751 return False
751 return False
752
752
753 # nlinks() may behave differently for files on Windows shares if
753 # nlinks() may behave differently for files on Windows shares if
754 # the file is open.
754 # the file is open.
755 fd = posixfile(f2)
755 fd = posixfile(f2)
756 return nlinks(f2) > 1
756 return nlinks(f2) > 1
757 finally:
757 finally:
758 if fd is not None:
758 if fd is not None:
759 fd.close()
759 fd.close()
760 for f in (f1, f2):
760 for f in (f1, f2):
761 try:
761 try:
762 os.unlink(f)
762 os.unlink(f)
763 except OSError:
763 except OSError:
764 pass
764 pass
765
765
766 return False
766 return False
767
767
768 def endswithsep(path):
768 def endswithsep(path):
769 '''Check path ends with os.sep or os.altsep.'''
769 '''Check path ends with os.sep or os.altsep.'''
770 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
770 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
771
771
772 def splitpath(path):
772 def splitpath(path):
773 '''Split path by os.sep.
773 '''Split path by os.sep.
774 Note that this function does not use os.altsep because this is
774 Note that this function does not use os.altsep because this is
775 an alternative to a simple "xxx.split(os.sep)".
775 an alternative to a simple "xxx.split(os.sep)".
776 It is recommended to use os.path.normpath() before using this
776 It is recommended to use os.path.normpath() before using this
777 function if needed.'''
777 function if needed.'''
778 return path.split(os.sep)
778 return path.split(os.sep)
779
779
780 def gui():
780 def gui():
781 '''Are we running in a GUI?'''
781 '''Are we running in a GUI?'''
782 if sys.platform == 'darwin':
782 if sys.platform == 'darwin':
783 if 'SSH_CONNECTION' in os.environ:
783 if 'SSH_CONNECTION' in os.environ:
784 # handle SSH access to a box where the user is logged in
784 # handle SSH access to a box where the user is logged in
785 return False
785 return False
786 elif getattr(osutil, 'isgui', None):
786 elif getattr(osutil, 'isgui', None):
787 # check if a CoreGraphics session is available
787 # check if a CoreGraphics session is available
788 return osutil.isgui()
788 return osutil.isgui()
789 else:
789 else:
790 # pure build; use a safe default
790 # pure build; use a safe default
791 return True
791 return True
792 else:
792 else:
793 return os.name == "nt" or os.environ.get("DISPLAY")
793 return os.name == "nt" or os.environ.get("DISPLAY")
794
794
795 def mktempcopy(name, emptyok=False, createmode=None):
795 def mktempcopy(name, emptyok=False, createmode=None):
796 """Create a temporary file with the same contents from name
796 """Create a temporary file with the same contents from name
797
797
798 The permission bits are copied from the original file.
798 The permission bits are copied from the original file.
799
799
800 If the temporary file is going to be truncated immediately, you
800 If the temporary file is going to be truncated immediately, you
801 can use emptyok=True as an optimization.
801 can use emptyok=True as an optimization.
802
802
803 Returns the name of the temporary file.
803 Returns the name of the temporary file.
804 """
804 """
805 d, fn = os.path.split(name)
805 d, fn = os.path.split(name)
806 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
806 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
807 os.close(fd)
807 os.close(fd)
808 # Temporary files are created with mode 0600, which is usually not
808 # Temporary files are created with mode 0600, which is usually not
809 # what we want. If the original file already exists, just copy
809 # what we want. If the original file already exists, just copy
810 # its mode. Otherwise, manually obey umask.
810 # its mode. Otherwise, manually obey umask.
811 try:
811 try:
812 st_mode = os.lstat(name).st_mode & 0777
812 st_mode = os.lstat(name).st_mode & 0777
813 except OSError, inst:
813 except OSError, inst:
814 if inst.errno != errno.ENOENT:
814 if inst.errno != errno.ENOENT:
815 raise
815 raise
816 st_mode = createmode
816 st_mode = createmode
817 if st_mode is None:
817 if st_mode is None:
818 st_mode = ~umask
818 st_mode = ~umask
819 st_mode &= 0666
819 st_mode &= 0666
820 os.chmod(temp, st_mode)
820 os.chmod(temp, st_mode)
821 if emptyok:
821 if emptyok:
822 return temp
822 return temp
823 try:
823 try:
824 try:
824 try:
825 ifp = posixfile(name, "rb")
825 ifp = posixfile(name, "rb")
826 except IOError, inst:
826 except IOError, inst:
827 if inst.errno == errno.ENOENT:
827 if inst.errno == errno.ENOENT:
828 return temp
828 return temp
829 if not getattr(inst, 'filename', None):
829 if not getattr(inst, 'filename', None):
830 inst.filename = name
830 inst.filename = name
831 raise
831 raise
832 ofp = posixfile(temp, "wb")
832 ofp = posixfile(temp, "wb")
833 for chunk in filechunkiter(ifp):
833 for chunk in filechunkiter(ifp):
834 ofp.write(chunk)
834 ofp.write(chunk)
835 ifp.close()
835 ifp.close()
836 ofp.close()
836 ofp.close()
837 except:
837 except:
838 try: os.unlink(temp)
838 try: os.unlink(temp)
839 except: pass
839 except: pass
840 raise
840 raise
841 return temp
841 return temp
842
842
843 class atomictempfile(object):
843 class atomictempfile(object):
844 """file-like object that atomically updates a file
844 """file-like object that atomically updates a file
845
845
846 All writes will be redirected to a temporary copy of the original
846 All writes will be redirected to a temporary copy of the original
847 file. When rename is called, the copy is renamed to the original
847 file. When rename is called, the copy is renamed to the original
848 name, making the changes visible.
848 name, making the changes visible.
849 """
849 """
850 def __init__(self, name, mode='w+b', createmode=None):
850 def __init__(self, name, mode='w+b', createmode=None):
851 self.__name = name
851 self.__name = name
852 self._fp = None
852 self._fp = None
853 self.temp = mktempcopy(name, emptyok=('w' in mode),
853 self.temp = mktempcopy(name, emptyok=('w' in mode),
854 createmode=createmode)
854 createmode=createmode)
855 self._fp = posixfile(self.temp, mode)
855 self._fp = posixfile(self.temp, mode)
856
856
857 def __getattr__(self, name):
857 def __getattr__(self, name):
858 return getattr(self._fp, name)
858 return getattr(self._fp, name)
859
859
860 def rename(self):
860 def rename(self):
861 if not self._fp.closed:
861 if not self._fp.closed:
862 self._fp.close()
862 self._fp.close()
863 rename(self.temp, localpath(self.__name))
863 rename(self.temp, localpath(self.__name))
864
864
865 def close(self):
865 def close(self):
866 if not self._fp:
866 if not self._fp:
867 return
867 return
868 if not self._fp.closed:
868 if not self._fp.closed:
869 try:
869 try:
870 os.unlink(self.temp)
870 os.unlink(self.temp)
871 except: pass
871 except: pass
872 self._fp.close()
872 self._fp.close()
873
873
874 def __del__(self):
874 def __del__(self):
875 self.close()
875 self.close()
876
876
877 def makedirs(name, mode=None):
877 def makedirs(name, mode=None):
878 """recursive directory creation with parent mode inheritance"""
878 """recursive directory creation with parent mode inheritance"""
879 parent = os.path.abspath(os.path.dirname(name))
879 parent = os.path.abspath(os.path.dirname(name))
880 try:
880 try:
881 os.mkdir(name)
881 os.mkdir(name)
882 if mode is not None:
882 if mode is not None:
883 os.chmod(name, mode)
883 os.chmod(name, mode)
884 return
884 return
885 except OSError, err:
885 except OSError, err:
886 if err.errno == errno.EEXIST:
886 if err.errno == errno.EEXIST:
887 return
887 return
888 if not name or parent == name or err.errno != errno.ENOENT:
888 if not name or parent == name or err.errno != errno.ENOENT:
889 raise
889 raise
890 makedirs(parent, mode)
890 makedirs(parent, mode)
891 makedirs(name, mode)
891 makedirs(name, mode)
892
892
893 class opener(object):
893 class opener(object):
894 """Open files relative to a base directory
894 """Open files relative to a base directory
895
895
896 This class is used to hide the details of COW semantics and
896 This class is used to hide the details of COW semantics and
897 remote file access from higher level code.
897 remote file access from higher level code.
898 """
898 """
899 def __init__(self, base, audit=True):
899 def __init__(self, base, audit=True):
900 self.base = base
900 self.base = base
901 if audit:
901 if audit:
902 self.auditor = path_auditor(base)
902 self.auditor = path_auditor(base)
903 else:
903 else:
904 self.auditor = always
904 self.auditor = always
905 self.createmode = None
905 self.createmode = None
906 self._trustnlink = None
906 self._trustnlink = None
907
907
908 @propertycache
908 @propertycache
909 def _can_symlink(self):
909 def _can_symlink(self):
910 return checklink(self.base)
910 return checklink(self.base)
911
911
912 def _fixfilemode(self, name):
912 def _fixfilemode(self, name):
913 if self.createmode is None:
913 if self.createmode is None:
914 return
914 return
915 os.chmod(name, self.createmode & 0666)
915 os.chmod(name, self.createmode & 0666)
916
916
917 def __call__(self, path, mode="r", text=False, atomictemp=False):
917 def __call__(self, path, mode="r", text=False, atomictemp=False):
918 r = checkosfilename(path)
918 r = checkosfilename(path)
919 if r:
919 if r:
920 raise Abort("%s: %s" % (r, path))
920 raise Abort("%s: %r" % (r, path))
921 self.auditor(path)
921 self.auditor(path)
922 f = os.path.join(self.base, path)
922 f = os.path.join(self.base, path)
923
923
924 if not text and "b" not in mode:
924 if not text and "b" not in mode:
925 mode += "b" # for that other OS
925 mode += "b" # for that other OS
926
926
927 nlink = -1
927 nlink = -1
928 dirname, basename = os.path.split(f)
928 dirname, basename = os.path.split(f)
929 # If basename is empty, then the path is malformed because it points
929 # If basename is empty, then the path is malformed because it points
930 # to a directory. Let the posixfile() call below raise IOError.
930 # to a directory. Let the posixfile() call below raise IOError.
931 if basename and mode not in ('r', 'rb'):
931 if basename and mode not in ('r', 'rb'):
932 if atomictemp:
932 if atomictemp:
933 if not os.path.isdir(dirname):
933 if not os.path.isdir(dirname):
934 makedirs(dirname, self.createmode)
934 makedirs(dirname, self.createmode)
935 return atomictempfile(f, mode, self.createmode)
935 return atomictempfile(f, mode, self.createmode)
936 try:
936 try:
937 if 'w' in mode:
937 if 'w' in mode:
938 unlink(f)
938 unlink(f)
939 nlink = 0
939 nlink = 0
940 else:
940 else:
941 # nlinks() may behave differently for files on Windows
941 # nlinks() may behave differently for files on Windows
942 # shares if the file is open.
942 # shares if the file is open.
943 fd = posixfile(f)
943 fd = posixfile(f)
944 nlink = nlinks(f)
944 nlink = nlinks(f)
945 if nlink < 1:
945 if nlink < 1:
946 nlink = 2 # force mktempcopy (issue1922)
946 nlink = 2 # force mktempcopy (issue1922)
947 fd.close()
947 fd.close()
948 except (OSError, IOError), e:
948 except (OSError, IOError), e:
949 if e.errno != errno.ENOENT:
949 if e.errno != errno.ENOENT:
950 raise
950 raise
951 nlink = 0
951 nlink = 0
952 if not os.path.isdir(dirname):
952 if not os.path.isdir(dirname):
953 makedirs(dirname, self.createmode)
953 makedirs(dirname, self.createmode)
954 if nlink > 0:
954 if nlink > 0:
955 if self._trustnlink is None:
955 if self._trustnlink is None:
956 self._trustnlink = nlink > 1 or checknlink(f)
956 self._trustnlink = nlink > 1 or checknlink(f)
957 if nlink > 1 or not self._trustnlink:
957 if nlink > 1 or not self._trustnlink:
958 rename(mktempcopy(f), f)
958 rename(mktempcopy(f), f)
959 fp = posixfile(f, mode)
959 fp = posixfile(f, mode)
960 if nlink == 0:
960 if nlink == 0:
961 self._fixfilemode(f)
961 self._fixfilemode(f)
962 return fp
962 return fp
963
963
964 def symlink(self, src, dst):
964 def symlink(self, src, dst):
965 self.auditor(dst)
965 self.auditor(dst)
966 linkname = os.path.join(self.base, dst)
966 linkname = os.path.join(self.base, dst)
967 try:
967 try:
968 os.unlink(linkname)
968 os.unlink(linkname)
969 except OSError:
969 except OSError:
970 pass
970 pass
971
971
972 dirname = os.path.dirname(linkname)
972 dirname = os.path.dirname(linkname)
973 if not os.path.exists(dirname):
973 if not os.path.exists(dirname):
974 makedirs(dirname, self.createmode)
974 makedirs(dirname, self.createmode)
975
975
976 if self._can_symlink:
976 if self._can_symlink:
977 try:
977 try:
978 os.symlink(src, linkname)
978 os.symlink(src, linkname)
979 except OSError, err:
979 except OSError, err:
980 raise OSError(err.errno, _('could not symlink to %r: %s') %
980 raise OSError(err.errno, _('could not symlink to %r: %s') %
981 (src, err.strerror), linkname)
981 (src, err.strerror), linkname)
982 else:
982 else:
983 f = self(dst, "w")
983 f = self(dst, "w")
984 f.write(src)
984 f.write(src)
985 f.close()
985 f.close()
986 self._fixfilemode(dst)
986 self._fixfilemode(dst)
987
987
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        left = l
        buf = ''
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            chunk = queue.pop(0)
            left -= len(chunk)
            if left < 0:
                queue.insert(0, chunk[left:])
                buf += chunk[:left]
            else:
                buf += chunk

        return buf

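# Illustrative sketch (not part of the original module): chunkbuffer lets a
# caller read fixed-size amounts regardless of how the underlying iterator
# chunks its data; read() spans chunk boundaries and returns a short string
# once the iterator is exhausted.
def _example_chunkbuffer_usage():
    buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    assert buf.read(4) == 'abcd'     # spans the first two chunks
    assert buf.read(100) == 'efghij' # shorter than asked: iterator ran dry
    assert buf.read(1) == ''         # nothing left
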
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file, size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

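# Illustrative sketch (not part of the original module): stream a file in
# bounded chunks, stopping after an optional byte limit. cStringIO stands in
# for a real file object here.
def _example_filechunkiter_usage():
    import cStringIO
    f = cStringIO.StringIO('x' * 10)
    assert list(filechunkiter(f, size=4)) == ['xxxx', 'xxxx', 'xx']
    f.seek(0)
    assert list(filechunkiter(f, size=4, limit=6)) == ['xxxx', 'xx']
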
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    t = time.mktime(lt)
    if t < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % t, hint=hint)
    return t, tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. In the format string, "%1" expands
    to the signed hours of the time zone offset and "%2" to its minutes;
    leave them out to omit the time zone."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

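# Illustrative sketch (not part of the original module): formatting the epoch
# with an explicit one-hour-east offset (-3600 seconds, i.e. UTC+01:00).
def _example_datestr_usage():
    # a numeric-only format keeps the result locale-independent
    assert datestr((0, -3600), format='%Y-%m-%d %H:%M %1%2') == '1970-01-01 01:00 +0100'
    # with the default format this prints something like
    # 'Thu Jan 01 00:00:00 1970 +0000' (weekday/month names follow the locale)
    print datestr((0, 0))
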
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        nowmap = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

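# Illustrative sketch (not part of the original module): parsedate accepts a
# raw "unixtime offset" pair, a tuple (returned unchanged), or any of the
# formats in defaultdateformats.
def _example_parsedate_usage():
    assert parsedate('0 0') == (0, 0)
    assert parsedate((1300000000, -3600)) == (1300000000, -3600)
    # an ISO-style string with an explicit zone parses without guessing
    when, offset = parsedate('2011-03-26 05:33 +0100')
    assert offset == -3600
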
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

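# Illustrative sketch (not part of the original module): the specifier forms
# not covered by the doctest above, "-DAYS" (within the last N days) and
# "DATE to DATE" ranges.
def _example_matchdate_usage():
    lastweek = matchdate("-7")      # changesets from the last 7 days
    assert lastweek(makedate()[0])  # "now" is always within that window
    inmarch = matchdate("2011-03-01 to 2011-03-31")
    assert inmarch(parsedate("2011-03-15")[0])
    assert not inmarch(parsedate("2011-04-02")[0])
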
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

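# Illustrative sketch (not part of the original module): how the two helpers
# reduce a typical "Name <user@host>" author string.
def _example_author_helpers():
    author = 'Jane Doe <jane.doe@example.com>'
    assert email(author) == 'jane.doe@example.com'
    assert shortuser(author) == 'jane'
    # with no angle brackets, email() falls back to the whole string
    assert email('jane.doe@example.com') == 'jane.doe@example.com'
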
def _ellipsis(text, maxlength):
    if len(text) <= maxlength:
        return text, False
    else:
        return "%s..." % (text[:maxlength - 3]), True

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # use unicode not to split at intermediate multi-byte sequence
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if not truncated:
            return text
        return utext.encode(encoding.encoding)
    except (UnicodeDecodeError, UnicodeEncodeError):
        return _ellipsis(text, maxlength)[0]

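# Illustrative sketch (not part of the original module): strings at or under
# the limit pass through unchanged, longer ones are cut to make room for the
# three-dot suffix.
def _example_ellipsis_usage():
    assert ellipsis('changeset description', maxlength=40) == 'changeset description'
    assert ellipsis('changeset description', maxlength=12) == 'changeset...'
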
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

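# Illustrative sketch (not part of the original module): the number of decimal
# places shrinks as the value grows, so the output stays roughly four
# significant digits wide.
def _example_bytecount_usage():
    assert bytecount(100) == '100 bytes'
    assert bytecount(10000) == '9.77 KB'
    assert bytecount(1234567) == '1.18 MB'
    assert bytecount(50 * (1 << 30)) == '50.0 GB'
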
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for double-width characters.

        Some Asian characters use two terminal columns instead of one.
        A good example of this behavior can be seen with u'\u65e5\u672c',
        the two Japanese characters for "Japan":
        len() returns 2, but when printed to a terminal, they eat 4 columns.

        (Note that this has nothing to do whatsoever with unicode
        representation, or encoding of the underlying string)
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

        def _cutdown(self, str, space_left):
            l = 0
            ucstr = unicode(str, encoding.encoding)
            colwidth = unicodedata.east_asian_width
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
                if space_left < l:
                    return (ucstr[:i].encode(encoding.encoding),
                            ucstr[i:].encode(encoding.encoding))
            return str, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)

def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)

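# Illustrative sketch (not part of the original module): wrap() is a thin
# front-end over MBTextWrapper; the first line gets initindent and every
# following line gets hangindent.
def _example_wrap_usage():
    text = 'a b c d e f g h'
    print wrap(text, width=8, initindent='* ', hangindent='  ')
    # prints something like:
    # * a b c
    #   d e f
    #   g h
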
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

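# Illustrative sketch (not part of the original module): substitute %-prefixed
# keys in a template-ish string; fn post-processes each replacement.
def _example_interpolate_usage():
    mapping = {'user': 'alice', 'repo': 'hg'}
    s = 'pushed by %user to %repo'
    assert interpolate('%', mapping, s) == 'pushed by alice to hg'
    assert (interpolate('%', mapping, s, fn=lambda v: v.upper())
            == 'pushed by ALICE to HG')
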
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)

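# Illustrative sketch (not part of the original module): numeric strings and
# integers pass straight through, service names go through the system's
# services database.
def _example_getport_usage():
    assert getport(8000) == 8000
    assert getport('8000') == 8000
    # 'http' resolves via socket.getservbyname() on most systems
    assert getport('http') == 80
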
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
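
# Illustrative sketch (not part of the original module): the usual hgrc
# spellings map to True/False, anything else maps to None so callers can
# tell "unset or garbage" apart from an explicit False.
def _example_parsebool_usage():
    assert parsebool('Yes') is True
    assert parsebool('off') is False
    assert parsebool('maybe') is None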