path_auditor: eliminate local function 'check' in __call__
Adrian Buehlmann
r13928:155d2e17 default
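This changeset simplifies path_auditor.__call__ in mercurial/util.py: the nested helper check(prefix), previously defined inside __call__ and invoked once per iteration of the prefix-walking loop, is removed and its body is inlined directly into that while loop. A short docstring is also added to __call__. As the hunk header below shows, the file grows from 1578 to 1580 lines: the docstring and a couple of blank lines come in, while the 'def check(prefix):' line and its single 'check(prefix)' call site go away.

A minimal sketch of the control-flow change, for orientation only. This is not the actual Mercurial code: only the names parts, prefix, prefixes, auditeddir, check and curpath are taken from the diff below; the stripped-down class and the elided checks are placeholders.

# Illustrative sketch of the refactor, not the real mercurial/util.py code.
# Only parts, prefix, prefixes, auditeddir, check and curpath come from the
# diff; this stripped-down class and the elided checks are placeholders.
import os

class sketchauditor(object):
    def __init__(self, root):
        self.root = root
        self.auditeddir = set()

    def call_old(self, parts):
        # old shape: a nested helper, called once per loop iteration
        def check(prefix):
            curpath = os.path.join(self.root, prefix)
            # ... lstat(curpath), reject symlinks and nested repos ...
        parts = list(parts)
        parts.pop()
        prefixes = []
        while parts:
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()
        self.auditeddir.update(prefixes)

    def call_new(self, parts):
        # new shape: the helper's body is inlined into the loop
        parts = list(parts)
        parts.pop()
        prefixes = []
        while parts:
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            curpath = os.path.join(self.root, prefix)
            # ... same lstat/symlink/nested-repo checks, now at the call site ...
            prefixes.append(prefix)
            parts.pop()
        self.auditeddir.update(prefixes)

Keeping the checks at their only call site lets the loop read straight through instead of jumping into a nested function, without changing which prefixes are audited or cached in auditeddir.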
@@ -1,1578 +1,1580
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 fp = open(outname, 'rb')
201 fp = open(outname, 'rb')
202 r = fp.read()
202 r = fp.read()
203 fp.close()
203 fp.close()
204 return r
204 return r
205 finally:
205 finally:
206 try:
206 try:
207 if inname:
207 if inname:
208 os.unlink(inname)
208 os.unlink(inname)
209 except:
209 except:
210 pass
210 pass
211 try:
211 try:
212 if outname:
212 if outname:
213 os.unlink(outname)
213 os.unlink(outname)
214 except:
214 except:
215 pass
215 pass
216
216
217 filtertable = {
217 filtertable = {
218 'tempfile:': tempfilter,
218 'tempfile:': tempfilter,
219 'pipe:': pipefilter,
219 'pipe:': pipefilter,
220 }
220 }
221
221
222 def filter(s, cmd):
222 def filter(s, cmd):
223 "filter a string through a command that transforms its input to its output"
223 "filter a string through a command that transforms its input to its output"
224 for name, fn in filtertable.iteritems():
224 for name, fn in filtertable.iteritems():
225 if cmd.startswith(name):
225 if cmd.startswith(name):
226 return fn(s, cmd[len(name):].lstrip())
226 return fn(s, cmd[len(name):].lstrip())
227 return pipefilter(s, cmd)
227 return pipefilter(s, cmd)
228
228
229 def binary(s):
229 def binary(s):
230 """return true if a string is binary data"""
230 """return true if a string is binary data"""
231 return bool(s and '\0' in s)
231 return bool(s and '\0' in s)
232
232
233 def increasingchunks(source, min=1024, max=65536):
233 def increasingchunks(source, min=1024, max=65536):
234 '''return no less than min bytes per chunk while data remains,
234 '''return no less than min bytes per chunk while data remains,
235 doubling min after each chunk until it reaches max'''
235 doubling min after each chunk until it reaches max'''
236 def log2(x):
236 def log2(x):
237 if not x:
237 if not x:
238 return 0
238 return 0
239 i = 0
239 i = 0
240 while x:
240 while x:
241 x >>= 1
241 x >>= 1
242 i += 1
242 i += 1
243 return i - 1
243 return i - 1
244
244
245 buf = []
245 buf = []
246 blen = 0
246 blen = 0
247 for chunk in source:
247 for chunk in source:
248 buf.append(chunk)
248 buf.append(chunk)
249 blen += len(chunk)
249 blen += len(chunk)
250 if blen >= min:
250 if blen >= min:
251 if min < max:
251 if min < max:
252 min = min << 1
252 min = min << 1
253 nmin = 1 << log2(blen)
253 nmin = 1 << log2(blen)
254 if nmin > min:
254 if nmin > min:
255 min = nmin
255 min = nmin
256 if min > max:
256 if min > max:
257 min = max
257 min = max
258 yield ''.join(buf)
258 yield ''.join(buf)
259 blen = 0
259 blen = 0
260 buf = []
260 buf = []
261 if buf:
261 if buf:
262 yield ''.join(buf)
262 yield ''.join(buf)
263
263
264 Abort = error.Abort
264 Abort = error.Abort
265
265
266 def always(fn):
266 def always(fn):
267 return True
267 return True
268
268
269 def never(fn):
269 def never(fn):
270 return False
270 return False
271
271
272 def pathto(root, n1, n2):
272 def pathto(root, n1, n2):
273 '''return the relative path from one place to another.
273 '''return the relative path from one place to another.
274 root should use os.sep to separate directories
274 root should use os.sep to separate directories
275 n1 should use os.sep to separate directories
275 n1 should use os.sep to separate directories
276 n2 should use "/" to separate directories
276 n2 should use "/" to separate directories
277 returns an os.sep-separated path.
277 returns an os.sep-separated path.
278
278
279 If n1 is a relative path, it's assumed it's
279 If n1 is a relative path, it's assumed it's
280 relative to root.
280 relative to root.
281 n2 should always be relative to root.
281 n2 should always be relative to root.
282 '''
282 '''
283 if not n1:
283 if not n1:
284 return localpath(n2)
284 return localpath(n2)
285 if os.path.isabs(n1):
285 if os.path.isabs(n1):
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
287 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
288 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
289 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
290 a.reverse()
290 a.reverse()
291 b.reverse()
291 b.reverse()
292 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
293 a.pop()
293 a.pop()
294 b.pop()
294 b.pop()
295 b.reverse()
295 b.reverse()
296 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
297
297
298 def canonpath(root, cwd, myname, auditor=None):
298 def canonpath(root, cwd, myname, auditor=None):
299 """return the canonical path of myname, given cwd and root"""
299 """return the canonical path of myname, given cwd and root"""
300 if endswithsep(root):
300 if endswithsep(root):
301 rootsep = root
301 rootsep = root
302 else:
302 else:
303 rootsep = root + os.sep
303 rootsep = root + os.sep
304 name = myname
304 name = myname
305 if not os.path.isabs(name):
305 if not os.path.isabs(name):
306 name = os.path.join(root, cwd, name)
306 name = os.path.join(root, cwd, name)
307 name = os.path.normpath(name)
307 name = os.path.normpath(name)
308 if auditor is None:
308 if auditor is None:
309 auditor = path_auditor(root)
309 auditor = path_auditor(root)
310 if name != rootsep and name.startswith(rootsep):
310 if name != rootsep and name.startswith(rootsep):
311 name = name[len(rootsep):]
311 name = name[len(rootsep):]
312 auditor(name)
312 auditor(name)
313 return pconvert(name)
313 return pconvert(name)
314 elif name == root:
314 elif name == root:
315 return ''
315 return ''
316 else:
316 else:
317 # Determine whether `name' is in the hierarchy at or beneath `root',
317 # Determine whether `name' is in the hierarchy at or beneath `root',
318 # by iterating name=dirname(name) until that causes no change (can't
318 # by iterating name=dirname(name) until that causes no change (can't
319 # check name == '/', because that doesn't work on windows). For each
319 # check name == '/', because that doesn't work on windows). For each
320 # `name', compare dev/inode numbers. If they match, the list `rel'
320 # `name', compare dev/inode numbers. If they match, the list `rel'
321 # holds the reversed list of components making up the relative file
321 # holds the reversed list of components making up the relative file
322 # name we want.
322 # name we want.
323 root_st = os.stat(root)
323 root_st = os.stat(root)
324 rel = []
324 rel = []
325 while True:
325 while True:
326 try:
326 try:
327 name_st = os.stat(name)
327 name_st = os.stat(name)
328 except OSError:
328 except OSError:
329 break
329 break
330 if samestat(name_st, root_st):
330 if samestat(name_st, root_st):
331 if not rel:
331 if not rel:
332 # name was actually the same as root (maybe a symlink)
332 # name was actually the same as root (maybe a symlink)
333 return ''
333 return ''
334 rel.reverse()
334 rel.reverse()
335 name = os.path.join(*rel)
335 name = os.path.join(*rel)
336 auditor(name)
336 auditor(name)
337 return pconvert(name)
337 return pconvert(name)
338 dirname, basename = os.path.split(name)
338 dirname, basename = os.path.split(name)
339 rel.append(basename)
339 rel.append(basename)
340 if dirname == name:
340 if dirname == name:
341 break
341 break
342 name = dirname
342 name = dirname
343
343
344 raise Abort('%s not under root' % myname)
344 raise Abort('%s not under root' % myname)
345
345
346 _hgexecutable = None
346 _hgexecutable = None
347
347
348 def main_is_frozen():
348 def main_is_frozen():
349 """return True if we are a frozen executable.
349 """return True if we are a frozen executable.
350
350
351 The code supports py2exe (most common, Windows only) and tools/freeze
351 The code supports py2exe (most common, Windows only) and tools/freeze
352 (portable, not much used).
352 (portable, not much used).
353 """
353 """
354 return (hasattr(sys, "frozen") or # new py2exe
354 return (hasattr(sys, "frozen") or # new py2exe
355 hasattr(sys, "importers") or # old py2exe
355 hasattr(sys, "importers") or # old py2exe
356 imp.is_frozen("__main__")) # tools/freeze
356 imp.is_frozen("__main__")) # tools/freeze
357
357
358 def hgexecutable():
358 def hgexecutable():
359 """return location of the 'hg' executable.
359 """return location of the 'hg' executable.
360
360
361 Defaults to $HG or 'hg' in the search path.
361 Defaults to $HG or 'hg' in the search path.
362 """
362 """
363 if _hgexecutable is None:
363 if _hgexecutable is None:
364 hg = os.environ.get('HG')
364 hg = os.environ.get('HG')
365 if hg:
365 if hg:
366 set_hgexecutable(hg)
366 set_hgexecutable(hg)
367 elif main_is_frozen():
367 elif main_is_frozen():
368 set_hgexecutable(sys.executable)
368 set_hgexecutable(sys.executable)
369 else:
369 else:
370 exe = find_exe('hg') or os.path.basename(sys.argv[0])
370 exe = find_exe('hg') or os.path.basename(sys.argv[0])
371 set_hgexecutable(exe)
371 set_hgexecutable(exe)
372 return _hgexecutable
372 return _hgexecutable
373
373
374 def set_hgexecutable(path):
374 def set_hgexecutable(path):
375 """set location of the 'hg' executable"""
375 """set location of the 'hg' executable"""
376 global _hgexecutable
376 global _hgexecutable
377 _hgexecutable = path
377 _hgexecutable = path
378
378
379 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
379 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
380 '''enhanced shell command execution.
380 '''enhanced shell command execution.
381 run with environment maybe modified, maybe in different dir.
381 run with environment maybe modified, maybe in different dir.
382
382
383 if command fails and onerr is None, return status. if ui object,
383 if command fails and onerr is None, return status. if ui object,
384 print error message and return status, else raise onerr object as
384 print error message and return status, else raise onerr object as
385 exception.
385 exception.
386
386
387 if out is specified, it is assumed to be a file-like object that has a
387 if out is specified, it is assumed to be a file-like object that has a
388 write() method. stdout and stderr will be redirected to out.'''
388 write() method. stdout and stderr will be redirected to out.'''
389 try:
389 try:
390 sys.stdout.flush()
390 sys.stdout.flush()
391 except Exception:
391 except Exception:
392 pass
392 pass
393 def py2shell(val):
393 def py2shell(val):
394 'convert python object into string that is useful to shell'
394 'convert python object into string that is useful to shell'
395 if val is None or val is False:
395 if val is None or val is False:
396 return '0'
396 return '0'
397 if val is True:
397 if val is True:
398 return '1'
398 return '1'
399 return str(val)
399 return str(val)
400 origcmd = cmd
400 origcmd = cmd
401 cmd = quotecommand(cmd)
401 cmd = quotecommand(cmd)
402 env = dict(os.environ)
402 env = dict(os.environ)
403 env.update((k, py2shell(v)) for k, v in environ.iteritems())
403 env.update((k, py2shell(v)) for k, v in environ.iteritems())
404 env['HG'] = hgexecutable()
404 env['HG'] = hgexecutable()
405 if out is None:
405 if out is None:
406 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
406 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
407 env=env, cwd=cwd)
407 env=env, cwd=cwd)
408 else:
408 else:
409 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
409 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
410 env=env, cwd=cwd, stdout=subprocess.PIPE,
410 env=env, cwd=cwd, stdout=subprocess.PIPE,
411 stderr=subprocess.STDOUT)
411 stderr=subprocess.STDOUT)
412 for line in proc.stdout:
412 for line in proc.stdout:
413 out.write(line)
413 out.write(line)
414 proc.wait()
414 proc.wait()
415 rc = proc.returncode
415 rc = proc.returncode
416 if sys.platform == 'OpenVMS' and rc & 1:
416 if sys.platform == 'OpenVMS' and rc & 1:
417 rc = 0
417 rc = 0
418 if rc and onerr:
418 if rc and onerr:
419 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
419 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
420 explain_exit(rc)[0])
420 explain_exit(rc)[0])
421 if errprefix:
421 if errprefix:
422 errmsg = '%s: %s' % (errprefix, errmsg)
422 errmsg = '%s: %s' % (errprefix, errmsg)
423 try:
423 try:
424 onerr.warn(errmsg + '\n')
424 onerr.warn(errmsg + '\n')
425 except AttributeError:
425 except AttributeError:
426 raise onerr(errmsg)
426 raise onerr(errmsg)
427 return rc
427 return rc
428
428
429 def checksignature(func):
429 def checksignature(func):
430 '''wrap a function with code to check for calling errors'''
430 '''wrap a function with code to check for calling errors'''
431 def check(*args, **kwargs):
431 def check(*args, **kwargs):
432 try:
432 try:
433 return func(*args, **kwargs)
433 return func(*args, **kwargs)
434 except TypeError:
434 except TypeError:
435 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
435 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
436 raise error.SignatureError
436 raise error.SignatureError
437 raise
437 raise
438
438
439 return check
439 return check
440
440
441 def makedir(path, notindexed):
441 def makedir(path, notindexed):
442 os.mkdir(path)
442 os.mkdir(path)
443
443
444 def unlinkpath(f):
444 def unlinkpath(f):
445 """unlink and remove the directory if it is empty"""
445 """unlink and remove the directory if it is empty"""
446 os.unlink(f)
446 os.unlink(f)
447 # try removing directories that might now be empty
447 # try removing directories that might now be empty
448 try:
448 try:
449 os.removedirs(os.path.dirname(f))
449 os.removedirs(os.path.dirname(f))
450 except OSError:
450 except OSError:
451 pass
451 pass
452
452
453 def copyfile(src, dest):
453 def copyfile(src, dest):
454 "copy a file, preserving mode and atime/mtime"
454 "copy a file, preserving mode and atime/mtime"
455 if os.path.islink(src):
455 if os.path.islink(src):
456 try:
456 try:
457 os.unlink(dest)
457 os.unlink(dest)
458 except:
458 except:
459 pass
459 pass
460 os.symlink(os.readlink(src), dest)
460 os.symlink(os.readlink(src), dest)
461 else:
461 else:
462 try:
462 try:
463 shutil.copyfile(src, dest)
463 shutil.copyfile(src, dest)
464 shutil.copymode(src, dest)
464 shutil.copymode(src, dest)
465 except shutil.Error, inst:
465 except shutil.Error, inst:
466 raise Abort(str(inst))
466 raise Abort(str(inst))
467
467
468 def copyfiles(src, dst, hardlink=None):
468 def copyfiles(src, dst, hardlink=None):
469 """Copy a directory tree using hardlinks if possible"""
469 """Copy a directory tree using hardlinks if possible"""
470
470
471 if hardlink is None:
471 if hardlink is None:
472 hardlink = (os.stat(src).st_dev ==
472 hardlink = (os.stat(src).st_dev ==
473 os.stat(os.path.dirname(dst)).st_dev)
473 os.stat(os.path.dirname(dst)).st_dev)
474
474
475 num = 0
475 num = 0
476 if os.path.isdir(src):
476 if os.path.isdir(src):
477 os.mkdir(dst)
477 os.mkdir(dst)
478 for name, kind in osutil.listdir(src):
478 for name, kind in osutil.listdir(src):
479 srcname = os.path.join(src, name)
479 srcname = os.path.join(src, name)
480 dstname = os.path.join(dst, name)
480 dstname = os.path.join(dst, name)
481 hardlink, n = copyfiles(srcname, dstname, hardlink)
481 hardlink, n = copyfiles(srcname, dstname, hardlink)
482 num += n
482 num += n
483 else:
483 else:
484 if hardlink:
484 if hardlink:
485 try:
485 try:
486 os_link(src, dst)
486 os_link(src, dst)
487 except (IOError, OSError):
487 except (IOError, OSError):
488 hardlink = False
488 hardlink = False
489 shutil.copy(src, dst)
489 shutil.copy(src, dst)
490 else:
490 else:
491 shutil.copy(src, dst)
491 shutil.copy(src, dst)
492 num += 1
492 num += 1
493
493
494 return hardlink, num
494 return hardlink, num
495
495
496 _windows_reserved_filenames = '''con prn aux nul
496 _windows_reserved_filenames = '''con prn aux nul
497 com1 com2 com3 com4 com5 com6 com7 com8 com9
497 com1 com2 com3 com4 com5 com6 com7 com8 com9
498 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
498 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
499 _windows_reserved_chars = ':*?"<>|'
499 _windows_reserved_chars = ':*?"<>|'
500 def checkwinfilename(path):
500 def checkwinfilename(path):
501 '''Check that the base-relative path is a valid filename on Windows.
501 '''Check that the base-relative path is a valid filename on Windows.
502 Returns None if the path is ok, or a UI string describing the problem.
502 Returns None if the path is ok, or a UI string describing the problem.
503
503
504 >>> checkwinfilename("just/a/normal/path")
504 >>> checkwinfilename("just/a/normal/path")
505 >>> checkwinfilename("foo/bar/con.xml")
505 >>> checkwinfilename("foo/bar/con.xml")
506 "filename contains 'con', which is reserved on Windows"
506 "filename contains 'con', which is reserved on Windows"
507 >>> checkwinfilename("foo/con.xml/bar")
507 >>> checkwinfilename("foo/con.xml/bar")
508 "filename contains 'con', which is reserved on Windows"
508 "filename contains 'con', which is reserved on Windows"
509 >>> checkwinfilename("foo/bar/xml.con")
509 >>> checkwinfilename("foo/bar/xml.con")
510 >>> checkwinfilename("foo/bar/AUX/bla.txt")
510 >>> checkwinfilename("foo/bar/AUX/bla.txt")
511 "filename contains 'AUX', which is reserved on Windows"
511 "filename contains 'AUX', which is reserved on Windows"
512 >>> checkwinfilename("foo/bar/bla:.txt")
512 >>> checkwinfilename("foo/bar/bla:.txt")
513 "filename contains ':', which is reserved on Windows"
513 "filename contains ':', which is reserved on Windows"
514 >>> checkwinfilename("foo/bar/b\07la.txt")
514 >>> checkwinfilename("foo/bar/b\07la.txt")
515 "filename contains '\\x07', which is invalid on Windows"
515 "filename contains '\\x07', which is invalid on Windows"
516 >>> checkwinfilename("foo/bar/bla ")
516 >>> checkwinfilename("foo/bar/bla ")
517 "filename ends with ' ', which is not allowed on Windows"
517 "filename ends with ' ', which is not allowed on Windows"
518 '''
518 '''
519 for n in path.replace('\\', '/').split('/'):
519 for n in path.replace('\\', '/').split('/'):
520 if not n:
520 if not n:
521 continue
521 continue
522 for c in n:
522 for c in n:
523 if c in _windows_reserved_chars:
523 if c in _windows_reserved_chars:
524 return _("filename contains '%s', which is reserved "
524 return _("filename contains '%s', which is reserved "
525 "on Windows") % c
525 "on Windows") % c
526 if ord(c) <= 31:
526 if ord(c) <= 31:
527 return _("filename contains '%s', which is invalid "
527 return _("filename contains '%s', which is invalid "
528 "on Windows") % c
528 "on Windows") % c
529 base = n.split('.')[0]
529 base = n.split('.')[0]
530 if base and base.lower() in _windows_reserved_filenames:
530 if base and base.lower() in _windows_reserved_filenames:
531 return _("filename contains '%s', which is reserved "
531 return _("filename contains '%s', which is reserved "
532 "on Windows") % base
532 "on Windows") % base
533 t = n[-1]
533 t = n[-1]
534 if t in '. ':
534 if t in '. ':
535 return _("filename ends with '%s', which is not allowed "
535 return _("filename ends with '%s', which is not allowed "
536 "on Windows") % t
536 "on Windows") % t
537
537
538 class path_auditor(object):
538 class path_auditor(object):
539 '''ensure that a filesystem path contains no banned components.
539 '''ensure that a filesystem path contains no banned components.
540 the following properties of a path are checked:
540 the following properties of a path are checked:
541
541
542 - ends with a directory separator
542 - ends with a directory separator
543 - under top-level .hg
543 - under top-level .hg
544 - starts at the root of a windows drive
544 - starts at the root of a windows drive
545 - contains ".."
545 - contains ".."
546 - traverses a symlink (e.g. a/symlink_here/b)
546 - traverses a symlink (e.g. a/symlink_here/b)
547 - inside a nested repository (a callback can be used to approve
547 - inside a nested repository (a callback can be used to approve
548 some nested repositories, e.g., subrepositories)
548 some nested repositories, e.g., subrepositories)
549 '''
549 '''
550
550
551 def __init__(self, root, callback=None):
551 def __init__(self, root, callback=None):
552 self.audited = set()
552 self.audited = set()
553 self.auditeddir = set()
553 self.auditeddir = set()
554 self.root = root
554 self.root = root
555 self.callback = callback
555 self.callback = callback
556
556
557 def __call__(self, path):
557 def __call__(self, path):
558 '''Check the relative path.
559 path may contain a pattern (e.g. foodir/**.txt)'''
560
558 if path in self.audited:
561 if path in self.audited:
559 return
562 return
560 # AIX ignores "/" at end of path, others raise EISDIR.
563 # AIX ignores "/" at end of path, others raise EISDIR.
561 if endswithsep(path):
564 if endswithsep(path):
562 raise Abort(_("path ends in directory separator: %s") % path)
565 raise Abort(_("path ends in directory separator: %s") % path)
563 normpath = os.path.normcase(path)
566 normpath = os.path.normcase(path)
564 parts = splitpath(normpath)
567 parts = splitpath(normpath)
565 if (os.path.splitdrive(path)[0]
568 if (os.path.splitdrive(path)[0]
566 or parts[0].lower() in ('.hg', '.hg.', '')
569 or parts[0].lower() in ('.hg', '.hg.', '')
567 or os.pardir in parts):
570 or os.pardir in parts):
568 raise Abort(_("path contains illegal component: %s") % path)
571 raise Abort(_("path contains illegal component: %s") % path)
569 if '.hg' in path.lower():
572 if '.hg' in path.lower():
570 lparts = [p.lower() for p in parts]
573 lparts = [p.lower() for p in parts]
571 for p in '.hg', '.hg.':
574 for p in '.hg', '.hg.':
572 if p in lparts[1:]:
575 if p in lparts[1:]:
573 pos = lparts.index(p)
576 pos = lparts.index(p)
574 base = os.path.join(*parts[:pos])
577 base = os.path.join(*parts[:pos])
575 raise Abort(_('path %r is inside nested repo %r')
578 raise Abort(_('path %r is inside nested repo %r')
576 % (path, base))
579 % (path, base))
577 def check(prefix):
580
581 parts.pop()
582 prefixes = []
583 while parts:
584 prefix = os.sep.join(parts)
585 if prefix in self.auditeddir:
586 break
578 curpath = os.path.join(self.root, prefix)
587 curpath = os.path.join(self.root, prefix)
579 try:
588 try:
580 st = os.lstat(curpath)
589 st = os.lstat(curpath)
581 except OSError, err:
590 except OSError, err:
582 # EINVAL can be raised as invalid path syntax under win32.
591 # EINVAL can be raised as invalid path syntax under win32.
583 # They must be ignored for patterns can be checked too.
592 # They must be ignored for patterns can be checked too.
584 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
593 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
585 raise
594 raise
586 else:
595 else:
587 if stat.S_ISLNK(st.st_mode):
596 if stat.S_ISLNK(st.st_mode):
588 raise Abort(_('path %r traverses symbolic link %r') %
597 raise Abort(_('path %r traverses symbolic link %r') %
589 (path, prefix))
598 (path, prefix))
590 elif (stat.S_ISDIR(st.st_mode) and
599 elif (stat.S_ISDIR(st.st_mode) and
591 os.path.isdir(os.path.join(curpath, '.hg'))):
600 os.path.isdir(os.path.join(curpath, '.hg'))):
592 if not self.callback or not self.callback(curpath):
601 if not self.callback or not self.callback(curpath):
593 raise Abort(_('path %r is inside nested repo %r') %
602 raise Abort(_('path %r is inside nested repo %r') %
594 (path, prefix))
603 (path, prefix))
595 parts.pop()
596 prefixes = []
597 while parts:
598 prefix = os.sep.join(parts)
599 if prefix in self.auditeddir:
600 break
601 check(prefix)
602 prefixes.append(prefix)
604 prefixes.append(prefix)
603 parts.pop()
605 parts.pop()
604
606
605 self.audited.add(path)
607 self.audited.add(path)
606 # only add prefixes to the cache after checking everything: we don't
608 # only add prefixes to the cache after checking everything: we don't
607 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
609 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
608 self.auditeddir.update(prefixes)
610 self.auditeddir.update(prefixes)
609
611
610 def lookup_reg(key, name=None, scope=None):
612 def lookup_reg(key, name=None, scope=None):
611 return None
613 return None
612
614
613 def hidewindow():
615 def hidewindow():
614 """Hide current shell window.
616 """Hide current shell window.
615
617
616 Used to hide the window opened when starting asynchronous
618 Used to hide the window opened when starting asynchronous
617 child process under Windows, unneeded on other systems.
619 child process under Windows, unneeded on other systems.
618 """
620 """
619 pass
621 pass
620
622
621 if os.name == 'nt':
623 if os.name == 'nt':
622 checkosfilename = checkwinfilename
624 checkosfilename = checkwinfilename
623 from windows import *
625 from windows import *
624 else:
626 else:
625 from posix import *
627 from posix import *
626
628
627 def makelock(info, pathname):
629 def makelock(info, pathname):
628 try:
630 try:
629 return os.symlink(info, pathname)
631 return os.symlink(info, pathname)
630 except OSError, why:
632 except OSError, why:
631 if why.errno == errno.EEXIST:
633 if why.errno == errno.EEXIST:
632 raise
634 raise
633 except AttributeError: # no symlink in os
635 except AttributeError: # no symlink in os
634 pass
636 pass
635
637
636 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
638 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
637 os.write(ld, info)
639 os.write(ld, info)
638 os.close(ld)
640 os.close(ld)
639
641
640 def readlock(pathname):
642 def readlock(pathname):
641 try:
643 try:
642 return os.readlink(pathname)
644 return os.readlink(pathname)
643 except OSError, why:
645 except OSError, why:
644 if why.errno not in (errno.EINVAL, errno.ENOSYS):
646 if why.errno not in (errno.EINVAL, errno.ENOSYS):
645 raise
647 raise
646 except AttributeError: # no symlink in os
648 except AttributeError: # no symlink in os
647 pass
649 pass
648 fp = posixfile(pathname)
650 fp = posixfile(pathname)
649 r = fp.read()
651 r = fp.read()
650 fp.close()
652 fp.close()
651 return r
653 return r
652
654
653 def fstat(fp):
655 def fstat(fp):
654 '''stat file object that may not have fileno method.'''
656 '''stat file object that may not have fileno method.'''
655 try:
657 try:
656 return os.fstat(fp.fileno())
658 return os.fstat(fp.fileno())
657 except AttributeError:
659 except AttributeError:
658 return os.stat(fp.name)
660 return os.stat(fp.name)
659
661
660 # File system features
662 # File system features
661
663
662 def checkcase(path):
664 def checkcase(path):
663 """
665 """
664 Check whether the given path is on a case-sensitive filesystem
666 Check whether the given path is on a case-sensitive filesystem
665
667
666 Requires a path (like /foo/.hg) ending with a foldable final
668 Requires a path (like /foo/.hg) ending with a foldable final
667 directory component.
669 directory component.
668 """
670 """
669 s1 = os.stat(path)
671 s1 = os.stat(path)
670 d, b = os.path.split(path)
672 d, b = os.path.split(path)
671 p2 = os.path.join(d, b.upper())
673 p2 = os.path.join(d, b.upper())
672 if path == p2:
674 if path == p2:
673 p2 = os.path.join(d, b.lower())
675 p2 = os.path.join(d, b.lower())
674 try:
676 try:
675 s2 = os.stat(p2)
677 s2 = os.stat(p2)
676 if s2 == s1:
678 if s2 == s1:
677 return False
679 return False
678 return True
680 return True
679 except:
681 except:
680 return True
682 return True
681
683
682 _fspathcache = {}
684 _fspathcache = {}
683 def fspath(name, root):
685 def fspath(name, root):
684 '''Get name in the case stored in the filesystem
686 '''Get name in the case stored in the filesystem
685
687
686 The name is either relative to root, or it is an absolute path starting
688 The name is either relative to root, or it is an absolute path starting
687 with root. Note that this function is unnecessary, and should not be
689 with root. Note that this function is unnecessary, and should not be
688 called, for case-sensitive filesystems (simply because it's expensive).
690 called, for case-sensitive filesystems (simply because it's expensive).
689 '''
691 '''
690 # If name is absolute, make it relative
692 # If name is absolute, make it relative
691 if name.lower().startswith(root.lower()):
693 if name.lower().startswith(root.lower()):
692 l = len(root)
694 l = len(root)
693 if name[l] == os.sep or name[l] == os.altsep:
695 if name[l] == os.sep or name[l] == os.altsep:
694 l = l + 1
696 l = l + 1
695 name = name[l:]
697 name = name[l:]
696
698
697 if not os.path.lexists(os.path.join(root, name)):
699 if not os.path.lexists(os.path.join(root, name)):
698 return None
700 return None
699
701
700 seps = os.sep
702 seps = os.sep
701 if os.altsep:
703 if os.altsep:
702 seps = seps + os.altsep
704 seps = seps + os.altsep
703 # Protect backslashes. This gets silly very quickly.
705 # Protect backslashes. This gets silly very quickly.
704 seps.replace('\\','\\\\')
706 seps.replace('\\','\\\\')
705 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
707 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
706 dir = os.path.normcase(os.path.normpath(root))
708 dir = os.path.normcase(os.path.normpath(root))
707 result = []
709 result = []
708 for part, sep in pattern.findall(name):
710 for part, sep in pattern.findall(name):
709 if sep:
711 if sep:
710 result.append(sep)
712 result.append(sep)
711 continue
713 continue
712
714
713 if dir not in _fspathcache:
715 if dir not in _fspathcache:
714 _fspathcache[dir] = os.listdir(dir)
716 _fspathcache[dir] = os.listdir(dir)
715 contents = _fspathcache[dir]
717 contents = _fspathcache[dir]
716
718
717 lpart = part.lower()
719 lpart = part.lower()
718 lenp = len(part)
720 lenp = len(part)
719 for n in contents:
721 for n in contents:
720 if lenp == len(n) and n.lower() == lpart:
722 if lenp == len(n) and n.lower() == lpart:
721 result.append(n)
723 result.append(n)
722 break
724 break
723 else:
725 else:
724 # Cannot happen, as the file exists!
726 # Cannot happen, as the file exists!
725 result.append(part)
727 result.append(part)
726 dir = os.path.join(dir, lpart)
728 dir = os.path.join(dir, lpart)
727
729
728 return ''.join(result)
730 return ''.join(result)
729
731
730 def checknlink(testfile):
732 def checknlink(testfile):
731 '''check whether hardlink count reporting works properly'''
733 '''check whether hardlink count reporting works properly'''
732
734
733 # testfile may be open, so we need a separate file for checking to
735 # testfile may be open, so we need a separate file for checking to
734 # work around issue2543 (or testfile may get lost on Samba shares)
736 # work around issue2543 (or testfile may get lost on Samba shares)
735 f1 = testfile + ".hgtmp1"
737 f1 = testfile + ".hgtmp1"
736 if os.path.lexists(f1):
738 if os.path.lexists(f1):
737 return False
739 return False
738 try:
740 try:
739 posixfile(f1, 'w').close()
741 posixfile(f1, 'w').close()
740 except IOError:
742 except IOError:
741 return False
743 return False
742
744
743 f2 = testfile + ".hgtmp2"
745 f2 = testfile + ".hgtmp2"
744 fd = None
746 fd = None
745 try:
747 try:
746 try:
748 try:
747 os_link(f1, f2)
749 os_link(f1, f2)
748 except OSError:
750 except OSError:
749 return False
751 return False
750
752
751 # nlinks() may behave differently for files on Windows shares if
753 # nlinks() may behave differently for files on Windows shares if
752 # the file is open.
754 # the file is open.
753 fd = posixfile(f2)
755 fd = posixfile(f2)
754 return nlinks(f2) > 1
756 return nlinks(f2) > 1
755 finally:
757 finally:
756 if fd is not None:
758 if fd is not None:
757 fd.close()
759 fd.close()
758 for f in (f1, f2):
760 for f in (f1, f2):
759 try:
761 try:
760 os.unlink(f)
762 os.unlink(f)
761 except OSError:
763 except OSError:
762 pass
764 pass
763
765
764 return False
766 return False
765
767
766 def endswithsep(path):
768 def endswithsep(path):
767 '''Check path ends with os.sep or os.altsep.'''
769 '''Check path ends with os.sep or os.altsep.'''
768 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
770 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
769
771
770 def splitpath(path):
772 def splitpath(path):
771 '''Split path by os.sep.
773 '''Split path by os.sep.
772 Note that this function does not use os.altsep because this is
774 Note that this function does not use os.altsep because this is
773 an alternative of simple "xxx.split(os.sep)".
775 an alternative of simple "xxx.split(os.sep)".
774 It is recommended to use os.path.normpath() before using this
776 It is recommended to use os.path.normpath() before using this
775 function if need.'''
777 function if need.'''
776 return path.split(os.sep)
778 return path.split(os.sep)
777
779
778 def gui():
780 def gui():
779 '''Are we running in a GUI?'''
781 '''Are we running in a GUI?'''
780 if sys.platform == 'darwin':
782 if sys.platform == 'darwin':
781 if 'SSH_CONNECTION' in os.environ:
783 if 'SSH_CONNECTION' in os.environ:
782 # handle SSH access to a box where the user is logged in
784 # handle SSH access to a box where the user is logged in
783 return False
785 return False
784 elif getattr(osutil, 'isgui', None):
786 elif getattr(osutil, 'isgui', None):
785 # check if a CoreGraphics session is available
787 # check if a CoreGraphics session is available
786 return osutil.isgui()
788 return osutil.isgui()
787 else:
789 else:
788 # pure build; use a safe default
790 # pure build; use a safe default
789 return True
791 return True
790 else:
792 else:
791 return os.name == "nt" or os.environ.get("DISPLAY")
793 return os.name == "nt" or os.environ.get("DISPLAY")
792
794
793 def mktempcopy(name, emptyok=False, createmode=None):
795 def mktempcopy(name, emptyok=False, createmode=None):
794 """Create a temporary file with the same contents from name
796 """Create a temporary file with the same contents from name
795
797
796 The permission bits are copied from the original file.
798 The permission bits are copied from the original file.
797
799
798 If the temporary file is going to be truncated immediately, you
800 If the temporary file is going to be truncated immediately, you
799 can use emptyok=True as an optimization.
801 can use emptyok=True as an optimization.
800
802
801 Returns the name of the temporary file.
803 Returns the name of the temporary file.
802 """
804 """
803 d, fn = os.path.split(name)
805 d, fn = os.path.split(name)
804 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
806 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
805 os.close(fd)
807 os.close(fd)
806 # Temporary files are created with mode 0600, which is usually not
808 # Temporary files are created with mode 0600, which is usually not
807 # what we want. If the original file already exists, just copy
809 # what we want. If the original file already exists, just copy
808 # its mode. Otherwise, manually obey umask.
810 # its mode. Otherwise, manually obey umask.
809 try:
811 try:
810 st_mode = os.lstat(name).st_mode & 0777
812 st_mode = os.lstat(name).st_mode & 0777
811 except OSError, inst:
813 except OSError, inst:
812 if inst.errno != errno.ENOENT:
814 if inst.errno != errno.ENOENT:
813 raise
815 raise
814 st_mode = createmode
816 st_mode = createmode
815 if st_mode is None:
817 if st_mode is None:
816 st_mode = ~umask
818 st_mode = ~umask
817 st_mode &= 0666
819 st_mode &= 0666
818 os.chmod(temp, st_mode)
820 os.chmod(temp, st_mode)
819 if emptyok:
821 if emptyok:
820 return temp
822 return temp
821 try:
823 try:
822 try:
824 try:
823 ifp = posixfile(name, "rb")
825 ifp = posixfile(name, "rb")
824 except IOError, inst:
826 except IOError, inst:
825 if inst.errno == errno.ENOENT:
827 if inst.errno == errno.ENOENT:
826 return temp
828 return temp
827 if not getattr(inst, 'filename', None):
829 if not getattr(inst, 'filename', None):
828 inst.filename = name
830 inst.filename = name
829 raise
831 raise
830 ofp = posixfile(temp, "wb")
832 ofp = posixfile(temp, "wb")
831 for chunk in filechunkiter(ifp):
833 for chunk in filechunkiter(ifp):
832 ofp.write(chunk)
834 ofp.write(chunk)
833 ifp.close()
835 ifp.close()
834 ofp.close()
836 ofp.close()
835 except:
837 except:
836 try: os.unlink(temp)
838 try: os.unlink(temp)
837 except: pass
839 except: pass
838 raise
840 raise
839 return temp
841 return temp
840
842
841 class atomictempfile(object):
843 class atomictempfile(object):
842 """file-like object that atomically updates a file
844 """file-like object that atomically updates a file
843
845
844 All writes will be redirected to a temporary copy of the original
846 All writes will be redirected to a temporary copy of the original
845 file. When rename is called, the copy is renamed to the original
847 file. When rename is called, the copy is renamed to the original
846 name, making the changes visible.
848 name, making the changes visible.
847 """
849 """
848 def __init__(self, name, mode='w+b', createmode=None):
850 def __init__(self, name, mode='w+b', createmode=None):
849 self.__name = name
851 self.__name = name
850 self._fp = None
852 self._fp = None
851 self.temp = mktempcopy(name, emptyok=('w' in mode),
853 self.temp = mktempcopy(name, emptyok=('w' in mode),
852 createmode=createmode)
854 createmode=createmode)
853 self._fp = posixfile(self.temp, mode)
855 self._fp = posixfile(self.temp, mode)
854
856
855 def __getattr__(self, name):
857 def __getattr__(self, name):
856 return getattr(self._fp, name)
858 return getattr(self._fp, name)
857
859
858 def rename(self):
860 def rename(self):
859 if not self._fp.closed:
861 if not self._fp.closed:
860 self._fp.close()
862 self._fp.close()
861 rename(self.temp, localpath(self.__name))
863 rename(self.temp, localpath(self.__name))
862
864
863 def close(self):
865 def close(self):
864 if not self._fp:
866 if not self._fp:
865 return
867 return
866 if not self._fp.closed:
868 if not self._fp.closed:
867 try:
869 try:
868 os.unlink(self.temp)
870 os.unlink(self.temp)
869 except: pass
871 except: pass
870 self._fp.close()
872 self._fp.close()
871
873
872 def __del__(self):
874 def __del__(self):
873 self.close()
875 self.close()
874
876
875 def makedirs(name, mode=None):
877 def makedirs(name, mode=None):
876 """recursive directory creation with parent mode inheritance"""
878 """recursive directory creation with parent mode inheritance"""
877 parent = os.path.abspath(os.path.dirname(name))
879 parent = os.path.abspath(os.path.dirname(name))
878 try:
880 try:
879 os.mkdir(name)
881 os.mkdir(name)
880 if mode is not None:
882 if mode is not None:
881 os.chmod(name, mode)
883 os.chmod(name, mode)
882 return
884 return
883 except OSError, err:
885 except OSError, err:
884 if err.errno == errno.EEXIST:
886 if err.errno == errno.EEXIST:
885 return
887 return
886 if not name or parent == name or err.errno != errno.ENOENT:
888 if not name or parent == name or err.errno != errno.ENOENT:
887 raise
889 raise
888 makedirs(parent, mode)
890 makedirs(parent, mode)
889 makedirs(name, mode)
891 makedirs(name, mode)
890
892
891 class opener(object):
893 class opener(object):
892 """Open files relative to a base directory
894 """Open files relative to a base directory
893
895
894 This class is used to hide the details of COW semantics and
896 This class is used to hide the details of COW semantics and
895 remote file access from higher level code.
897 remote file access from higher level code.
896 """
898 """
897 def __init__(self, base, audit=True):
899 def __init__(self, base, audit=True):
898 self.base = base
900 self.base = base
899 if audit:
901 if audit:
900 self.auditor = path_auditor(base)
902 self.auditor = path_auditor(base)
901 else:
903 else:
902 self.auditor = always
904 self.auditor = always
903 self.createmode = None
905 self.createmode = None
904 self._trustnlink = None
906 self._trustnlink = None
905
907
906 @propertycache
908 @propertycache
907 def _can_symlink(self):
909 def _can_symlink(self):
908 return checklink(self.base)
910 return checklink(self.base)
909
911
910 def _fixfilemode(self, name):
912 def _fixfilemode(self, name):
911 if self.createmode is None:
913 if self.createmode is None:
912 return
914 return
913 os.chmod(name, self.createmode & 0666)
915 os.chmod(name, self.createmode & 0666)
914
916
915 def __call__(self, path, mode="r", text=False, atomictemp=False):
917 def __call__(self, path, mode="r", text=False, atomictemp=False):
916 r = checkosfilename(path)
918 r = checkosfilename(path)
917 if r:
919 if r:
918 raise Abort("%s: %s" % (r, path))
920 raise Abort("%s: %s" % (r, path))
919 self.auditor(path)
921 self.auditor(path)
920 f = os.path.join(self.base, path)
922 f = os.path.join(self.base, path)
921
923
922 if not text and "b" not in mode:
924 if not text and "b" not in mode:
923 mode += "b" # for that other OS
925 mode += "b" # for that other OS
924
926
925 nlink = -1
927 nlink = -1
926 dirname, basename = os.path.split(f)
928 dirname, basename = os.path.split(f)
927 # If basename is empty, then the path is malformed because it points
929 # If basename is empty, then the path is malformed because it points
928 # to a directory. Let the posixfile() call below raise IOError.
930 # to a directory. Let the posixfile() call below raise IOError.
929 if basename and mode not in ('r', 'rb'):
931 if basename and mode not in ('r', 'rb'):
930 if atomictemp:
932 if atomictemp:
931 if not os.path.isdir(dirname):
933 if not os.path.isdir(dirname):
932 makedirs(dirname, self.createmode)
934 makedirs(dirname, self.createmode)
933 return atomictempfile(f, mode, self.createmode)
935 return atomictempfile(f, mode, self.createmode)
934 try:
936 try:
935 if 'w' in mode:
937 if 'w' in mode:
936 unlink(f)
938 unlink(f)
937 nlink = 0
939 nlink = 0
938 else:
940 else:
939 # nlinks() may behave differently for files on Windows
941 # nlinks() may behave differently for files on Windows
940 # shares if the file is open.
942 # shares if the file is open.
941 fd = posixfile(f)
943 fd = posixfile(f)
942 nlink = nlinks(f)
944 nlink = nlinks(f)
943 if nlink < 1:
945 if nlink < 1:
944 nlink = 2 # force mktempcopy (issue1922)
946 nlink = 2 # force mktempcopy (issue1922)
945 fd.close()
947 fd.close()
946 except (OSError, IOError), e:
948 except (OSError, IOError), e:
947 if e.errno != errno.ENOENT:
949 if e.errno != errno.ENOENT:
948 raise
950 raise
949 nlink = 0
951 nlink = 0
950 if not os.path.isdir(dirname):
952 if not os.path.isdir(dirname):
951 makedirs(dirname, self.createmode)
953 makedirs(dirname, self.createmode)
952 if nlink > 0:
954 if nlink > 0:
953 if self._trustnlink is None:
955 if self._trustnlink is None:
954 self._trustnlink = nlink > 1 or checknlink(f)
956 self._trustnlink = nlink > 1 or checknlink(f)
955 if nlink > 1 or not self._trustnlink:
957 if nlink > 1 or not self._trustnlink:
956 rename(mktempcopy(f), f)
958 rename(mktempcopy(f), f)
957 fp = posixfile(f, mode)
959 fp = posixfile(f, mode)
958 if nlink == 0:
960 if nlink == 0:
959 self._fixfilemode(f)
961 self._fixfilemode(f)
960 return fp
962 return fp
961
963
962 def symlink(self, src, dst):
964 def symlink(self, src, dst):
963 self.auditor(dst)
965 self.auditor(dst)
964 linkname = os.path.join(self.base, dst)
966 linkname = os.path.join(self.base, dst)
965 try:
967 try:
966 os.unlink(linkname)
968 os.unlink(linkname)
967 except OSError:
969 except OSError:
968 pass
970 pass
969
971
970 dirname = os.path.dirname(linkname)
972 dirname = os.path.dirname(linkname)
971 if not os.path.exists(dirname):
973 if not os.path.exists(dirname):
972 makedirs(dirname, self.createmode)
974 makedirs(dirname, self.createmode)
973
975
974 if self._can_symlink:
976 if self._can_symlink:
975 try:
977 try:
976 os.symlink(src, linkname)
978 os.symlink(src, linkname)
977 except OSError, err:
979 except OSError, err:
978 raise OSError(err.errno, _('could not symlink to %r: %s') %
980 raise OSError(err.errno, _('could not symlink to %r: %s') %
979 (src, err.strerror), linkname)
981 (src, err.strerror), linkname)
980 else:
982 else:
981 f = self(dst, "w")
983 f = self(dst, "w")
982 f.write(src)
984 f.write(src)
983 f.close()
985 f.close()
984 self._fixfilemode(dst)
986 self._fixfilemode(dst)
985
987
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        left = l
        buf = ''
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            chunk = queue.pop(0)
            left -= len(chunk)
            if left < 0:
                queue.insert(0, chunk[left:])
                buf += chunk[:left]
            else:
                buf += chunk

        return buf

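# Usage sketch (not in the original source): how chunkbuffer behaves when a
# read spans chunk boundaries and when the underlying iterator runs dry.
#
#   >>> cb = chunkbuffer(iter(['abc', 'defg', 'hi']))
#   >>> cb.read(5)        # crosses the 'abc'/'defg' boundary
#   'abcde'
#   >>> cb.read(10)       # iterator is exhausted, so fewer bytes come back
#   'fghi'
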
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data).  Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

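# Usage sketch (not in the original source); 'somefile' and the loop body
# are placeholders.
#
#   fp = open('somefile', 'rb')
#   for chunk in filechunkiter(fp, size=8192, limit=65536):
#       pass  # each chunk is at most 8192 bytes; at most 65536 bytes total
#   fp.close()
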
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    t = time.mktime(lt)
    if t < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % t, hint=hint)
    return t, tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC (positive means west of UTC). The %1
    and %2 placeholders in the format expand to the hours and minutes of
    the offset; leave them out to omit the time zone from the string."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

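# Usage sketch (not in the original source), assuming the C locale for the
# day and month names produced by strftime:
#
#   >>> datestr((0, 0))
#   'Thu Jan 01 00:00:00 1970 +0000'
#   >>> shortdate((0, 0))
#   '1970-01-01'
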
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        nowmap = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

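# Usage sketch (not in the original source): the fast path parses a raw
# "unixtime offset" pair without consulting any format list.
#
#   >>> parsedate('1300000000 -3600')
#   (1300000000, -3600)
#   >>> parsedate('')
#   (0, 0)
#   >>> parsedate((1300000000, 0))   # tuples pass through unchanged
#   (1300000000, 0)
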
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

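# Usage sketch (not in the original source): both helpers reduce a full
# "Name <user@host>" author string, in different directions.
#
#   >>> shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> email('John Doe <john.doe@example.com>')
#   'john.doe@example.com'
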
def _ellipsis(text, maxlength):
    if len(text) <= maxlength:
        return text, False
    else:
        return "%s..." % (text[:maxlength - 3]), True

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # use unicode not to split at intermediate multi-byte sequence
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if not truncated:
            return text
        return utext.encode(encoding.encoding)
    except (UnicodeDecodeError, UnicodeEncodeError):
        return _ellipsis(text, maxlength)[0]

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

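# Usage sketch (not in the original source): the unit table is walked
# top-down, so precision shrinks as magnitude grows. Output shown assuming
# the unit strings are not translated.
#
#   >>> bytecount(1)
#   '1 bytes'
#   >>> bytecount(1024)
#   '1.00 KB'
#   >>> bytecount(10 * 1024 * 1024)
#   '10.0 MB'
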
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for double-width characters.

        Some Asian characters use two terminal columns instead of one.
        A good example of this behavior can be seen with u'\u65e5\u672c',
        the two Japanese characters for "Japan":
        len() returns 2, but when printed to a terminal, they eat 4 columns.

        (Note that this has nothing to do whatsoever with unicode
        representation, or encoding of the underlying string)
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

        def _cutdown(self, str, space_left):
            l = 0
            ucstr = unicode(str, encoding.encoding)
            colwidth = unicodedata.east_asian_width
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
                if space_left < l:
                    return (ucstr[:i].encode(encoding.encoding),
                            ucstr[i:].encode(encoding.encoding))
            return str, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)

def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)

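# Usage sketch (not in the original source): wrap() drives MBTextWrapper, so
# double-width characters are budgeted correctly; plain ASCII input should
# wrap like textwrap.fill().
#
#   >>> wrap('aa bb cc dd', 8)
#   'aa bb cc\ndd'
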
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent wait on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

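# Usage sketch (not in the original source): a regex-significant prefix such
# as '$' must be passed pre-escaped, as the docstring says.
#
#   >>> interpolate(r'\$', {'foo': 'bar'}, 'say $foo')
#   'say bar'
#   >>> interpolate(r'\$', {'foo': 'bar'}, 'pay $$5 to $foo', escape_prefix=True)
#   'pay $5 to bar'
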
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)

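# Usage sketch (not in the original source): numeric strings come back as
# ints; service names are resolved via the system services database, so the
# second result assumes a conventional /etc/services.
#
#   >>> getport('8080')
#   8080
#   >>> getport('http')
#   80
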
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
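
# Usage sketch (not in the original source): parsebool() is three-valued;
# None signals "not a recognized boolean spelling".
#
#   >>> parsebool('Yes'), parsebool('off'), parsebool('maybe')
#   (True, False, None)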