util: move checkosfilename call from path_auditor to opener...
Adrian Buehlmann -
r13926:61ba09d8 default
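This changeset drops the checkosfilename(path) validation from the end of path_auditor.__call__ and performs it instead at the top of opener.__call__, before the path is audited and joined to the opener's base directory. In effect, the OS-specific filename check now runs when a file is actually opened through an opener rather than for every path handed to a path_auditor. Below is a minimal sketch of the contract the relocated call relies on; it is an illustration only, and the helper name _checked is made up for the example.

# checkosfilename(path) returns None when the name is acceptable on the
# current platform, or a message string describing the problem; the opener
# now turns such a message into an Abort before touching the filesystem.
def _checked(path, checkosfilename, Abort):
    r = checkosfilename(path)
    if r:
        raise Abort("%s: %s" % (r, path))
    return path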
@@ -1,1578 +1,1578 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 fp = open(outname, 'rb')
201 fp = open(outname, 'rb')
202 r = fp.read()
202 r = fp.read()
203 fp.close()
203 fp.close()
204 return r
204 return r
205 finally:
205 finally:
206 try:
206 try:
207 if inname:
207 if inname:
208 os.unlink(inname)
208 os.unlink(inname)
209 except:
209 except:
210 pass
210 pass
211 try:
211 try:
212 if outname:
212 if outname:
213 os.unlink(outname)
213 os.unlink(outname)
214 except:
214 except:
215 pass
215 pass
216
216
217 filtertable = {
217 filtertable = {
218 'tempfile:': tempfilter,
218 'tempfile:': tempfilter,
219 'pipe:': pipefilter,
219 'pipe:': pipefilter,
220 }
220 }
221
221
222 def filter(s, cmd):
222 def filter(s, cmd):
223 "filter a string through a command that transforms its input to its output"
223 "filter a string through a command that transforms its input to its output"
224 for name, fn in filtertable.iteritems():
224 for name, fn in filtertable.iteritems():
225 if cmd.startswith(name):
225 if cmd.startswith(name):
226 return fn(s, cmd[len(name):].lstrip())
226 return fn(s, cmd[len(name):].lstrip())
227 return pipefilter(s, cmd)
227 return pipefilter(s, cmd)
228
228
229 def binary(s):
229 def binary(s):
230 """return true if a string is binary data"""
230 """return true if a string is binary data"""
231 return bool(s and '\0' in s)
231 return bool(s and '\0' in s)
232
232
233 def increasingchunks(source, min=1024, max=65536):
233 def increasingchunks(source, min=1024, max=65536):
234 '''return no less than min bytes per chunk while data remains,
234 '''return no less than min bytes per chunk while data remains,
235 doubling min after each chunk until it reaches max'''
235 doubling min after each chunk until it reaches max'''
236 def log2(x):
236 def log2(x):
237 if not x:
237 if not x:
238 return 0
238 return 0
239 i = 0
239 i = 0
240 while x:
240 while x:
241 x >>= 1
241 x >>= 1
242 i += 1
242 i += 1
243 return i - 1
243 return i - 1
244
244
245 buf = []
245 buf = []
246 blen = 0
246 blen = 0
247 for chunk in source:
247 for chunk in source:
248 buf.append(chunk)
248 buf.append(chunk)
249 blen += len(chunk)
249 blen += len(chunk)
250 if blen >= min:
250 if blen >= min:
251 if min < max:
251 if min < max:
252 min = min << 1
252 min = min << 1
253 nmin = 1 << log2(blen)
253 nmin = 1 << log2(blen)
254 if nmin > min:
254 if nmin > min:
255 min = nmin
255 min = nmin
256 if min > max:
256 if min > max:
257 min = max
257 min = max
258 yield ''.join(buf)
258 yield ''.join(buf)
259 blen = 0
259 blen = 0
260 buf = []
260 buf = []
261 if buf:
261 if buf:
262 yield ''.join(buf)
262 yield ''.join(buf)
263
263
264 Abort = error.Abort
264 Abort = error.Abort
265
265
266 def always(fn):
266 def always(fn):
267 return True
267 return True
268
268
269 def never(fn):
269 def never(fn):
270 return False
270 return False
271
271
272 def pathto(root, n1, n2):
272 def pathto(root, n1, n2):
273 '''return the relative path from one place to another.
273 '''return the relative path from one place to another.
274 root should use os.sep to separate directories
274 root should use os.sep to separate directories
275 n1 should use os.sep to separate directories
275 n1 should use os.sep to separate directories
276 n2 should use "/" to separate directories
276 n2 should use "/" to separate directories
277 returns an os.sep-separated path.
277 returns an os.sep-separated path.
278
278
279 If n1 is a relative path, it's assumed it's
279 If n1 is a relative path, it's assumed it's
280 relative to root.
280 relative to root.
281 n2 should always be relative to root.
281 n2 should always be relative to root.
282 '''
282 '''
283 if not n1:
283 if not n1:
284 return localpath(n2)
284 return localpath(n2)
285 if os.path.isabs(n1):
285 if os.path.isabs(n1):
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
287 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
288 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
289 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
290 a.reverse()
290 a.reverse()
291 b.reverse()
291 b.reverse()
292 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
293 a.pop()
293 a.pop()
294 b.pop()
294 b.pop()
295 b.reverse()
295 b.reverse()
296 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
297
297
298 def canonpath(root, cwd, myname, auditor=None):
298 def canonpath(root, cwd, myname, auditor=None):
299 """return the canonical path of myname, given cwd and root"""
299 """return the canonical path of myname, given cwd and root"""
300 if endswithsep(root):
300 if endswithsep(root):
301 rootsep = root
301 rootsep = root
302 else:
302 else:
303 rootsep = root + os.sep
303 rootsep = root + os.sep
304 name = myname
304 name = myname
305 if not os.path.isabs(name):
305 if not os.path.isabs(name):
306 name = os.path.join(root, cwd, name)
306 name = os.path.join(root, cwd, name)
307 name = os.path.normpath(name)
307 name = os.path.normpath(name)
308 if auditor is None:
308 if auditor is None:
309 auditor = path_auditor(root)
309 auditor = path_auditor(root)
310 if name != rootsep and name.startswith(rootsep):
310 if name != rootsep and name.startswith(rootsep):
311 name = name[len(rootsep):]
311 name = name[len(rootsep):]
312 auditor(name)
312 auditor(name)
313 return pconvert(name)
313 return pconvert(name)
314 elif name == root:
314 elif name == root:
315 return ''
315 return ''
316 else:
316 else:
317 # Determine whether `name' is in the hierarchy at or beneath `root',
317 # Determine whether `name' is in the hierarchy at or beneath `root',
318 # by iterating name=dirname(name) until that causes no change (can't
318 # by iterating name=dirname(name) until that causes no change (can't
319 # check name == '/', because that doesn't work on windows). For each
319 # check name == '/', because that doesn't work on windows). For each
320 # `name', compare dev/inode numbers. If they match, the list `rel'
320 # `name', compare dev/inode numbers. If they match, the list `rel'
321 # holds the reversed list of components making up the relative file
321 # holds the reversed list of components making up the relative file
322 # name we want.
322 # name we want.
323 root_st = os.stat(root)
323 root_st = os.stat(root)
324 rel = []
324 rel = []
325 while True:
325 while True:
326 try:
326 try:
327 name_st = os.stat(name)
327 name_st = os.stat(name)
328 except OSError:
328 except OSError:
329 break
329 break
330 if samestat(name_st, root_st):
330 if samestat(name_st, root_st):
331 if not rel:
331 if not rel:
332 # name was actually the same as root (maybe a symlink)
332 # name was actually the same as root (maybe a symlink)
333 return ''
333 return ''
334 rel.reverse()
334 rel.reverse()
335 name = os.path.join(*rel)
335 name = os.path.join(*rel)
336 auditor(name)
336 auditor(name)
337 return pconvert(name)
337 return pconvert(name)
338 dirname, basename = os.path.split(name)
338 dirname, basename = os.path.split(name)
339 rel.append(basename)
339 rel.append(basename)
340 if dirname == name:
340 if dirname == name:
341 break
341 break
342 name = dirname
342 name = dirname
343
343
344 raise Abort('%s not under root' % myname)
344 raise Abort('%s not under root' % myname)
345
345
346 _hgexecutable = None
346 _hgexecutable = None
347
347
348 def main_is_frozen():
348 def main_is_frozen():
349 """return True if we are a frozen executable.
349 """return True if we are a frozen executable.
350
350
351 The code supports py2exe (most common, Windows only) and tools/freeze
351 The code supports py2exe (most common, Windows only) and tools/freeze
352 (portable, not much used).
352 (portable, not much used).
353 """
353 """
354 return (hasattr(sys, "frozen") or # new py2exe
354 return (hasattr(sys, "frozen") or # new py2exe
355 hasattr(sys, "importers") or # old py2exe
355 hasattr(sys, "importers") or # old py2exe
356 imp.is_frozen("__main__")) # tools/freeze
356 imp.is_frozen("__main__")) # tools/freeze
357
357
358 def hgexecutable():
358 def hgexecutable():
359 """return location of the 'hg' executable.
359 """return location of the 'hg' executable.
360
360
361 Defaults to $HG or 'hg' in the search path.
361 Defaults to $HG or 'hg' in the search path.
362 """
362 """
363 if _hgexecutable is None:
363 if _hgexecutable is None:
364 hg = os.environ.get('HG')
364 hg = os.environ.get('HG')
365 if hg:
365 if hg:
366 set_hgexecutable(hg)
366 set_hgexecutable(hg)
367 elif main_is_frozen():
367 elif main_is_frozen():
368 set_hgexecutable(sys.executable)
368 set_hgexecutable(sys.executable)
369 else:
369 else:
370 exe = find_exe('hg') or os.path.basename(sys.argv[0])
370 exe = find_exe('hg') or os.path.basename(sys.argv[0])
371 set_hgexecutable(exe)
371 set_hgexecutable(exe)
372 return _hgexecutable
372 return _hgexecutable
373
373
374 def set_hgexecutable(path):
374 def set_hgexecutable(path):
375 """set location of the 'hg' executable"""
375 """set location of the 'hg' executable"""
376 global _hgexecutable
376 global _hgexecutable
377 _hgexecutable = path
377 _hgexecutable = path
378
378
379 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
379 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
380 '''enhanced shell command execution.
380 '''enhanced shell command execution.
381 run with environment maybe modified, maybe in different dir.
381 run with environment maybe modified, maybe in different dir.
382
382
383 if command fails and onerr is None, return status. if ui object,
383 if command fails and onerr is None, return status. if ui object,
384 print error message and return status, else raise onerr object as
384 print error message and return status, else raise onerr object as
385 exception.
385 exception.
386
386
387 if out is specified, it is assumed to be a file-like object that has a
387 if out is specified, it is assumed to be a file-like object that has a
388 write() method. stdout and stderr will be redirected to out.'''
388 write() method. stdout and stderr will be redirected to out.'''
389 try:
389 try:
390 sys.stdout.flush()
390 sys.stdout.flush()
391 except Exception:
391 except Exception:
392 pass
392 pass
393 def py2shell(val):
393 def py2shell(val):
394 'convert python object into string that is useful to shell'
394 'convert python object into string that is useful to shell'
395 if val is None or val is False:
395 if val is None or val is False:
396 return '0'
396 return '0'
397 if val is True:
397 if val is True:
398 return '1'
398 return '1'
399 return str(val)
399 return str(val)
400 origcmd = cmd
400 origcmd = cmd
401 cmd = quotecommand(cmd)
401 cmd = quotecommand(cmd)
402 env = dict(os.environ)
402 env = dict(os.environ)
403 env.update((k, py2shell(v)) for k, v in environ.iteritems())
403 env.update((k, py2shell(v)) for k, v in environ.iteritems())
404 env['HG'] = hgexecutable()
404 env['HG'] = hgexecutable()
405 if out is None:
405 if out is None:
406 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
406 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
407 env=env, cwd=cwd)
407 env=env, cwd=cwd)
408 else:
408 else:
409 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
409 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
410 env=env, cwd=cwd, stdout=subprocess.PIPE,
410 env=env, cwd=cwd, stdout=subprocess.PIPE,
411 stderr=subprocess.STDOUT)
411 stderr=subprocess.STDOUT)
412 for line in proc.stdout:
412 for line in proc.stdout:
413 out.write(line)
413 out.write(line)
414 proc.wait()
414 proc.wait()
415 rc = proc.returncode
415 rc = proc.returncode
416 if sys.platform == 'OpenVMS' and rc & 1:
416 if sys.platform == 'OpenVMS' and rc & 1:
417 rc = 0
417 rc = 0
418 if rc and onerr:
418 if rc and onerr:
419 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
419 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
420 explain_exit(rc)[0])
420 explain_exit(rc)[0])
421 if errprefix:
421 if errprefix:
422 errmsg = '%s: %s' % (errprefix, errmsg)
422 errmsg = '%s: %s' % (errprefix, errmsg)
423 try:
423 try:
424 onerr.warn(errmsg + '\n')
424 onerr.warn(errmsg + '\n')
425 except AttributeError:
425 except AttributeError:
426 raise onerr(errmsg)
426 raise onerr(errmsg)
427 return rc
427 return rc
428
428
429 def checksignature(func):
429 def checksignature(func):
430 '''wrap a function with code to check for calling errors'''
430 '''wrap a function with code to check for calling errors'''
431 def check(*args, **kwargs):
431 def check(*args, **kwargs):
432 try:
432 try:
433 return func(*args, **kwargs)
433 return func(*args, **kwargs)
434 except TypeError:
434 except TypeError:
435 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
435 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
436 raise error.SignatureError
436 raise error.SignatureError
437 raise
437 raise
438
438
439 return check
439 return check
440
440
441 def makedir(path, notindexed):
441 def makedir(path, notindexed):
442 os.mkdir(path)
442 os.mkdir(path)
443
443
444 def unlinkpath(f):
444 def unlinkpath(f):
445 """unlink and remove the directory if it is empty"""
445 """unlink and remove the directory if it is empty"""
446 os.unlink(f)
446 os.unlink(f)
447 # try removing directories that might now be empty
447 # try removing directories that might now be empty
448 try:
448 try:
449 os.removedirs(os.path.dirname(f))
449 os.removedirs(os.path.dirname(f))
450 except OSError:
450 except OSError:
451 pass
451 pass
452
452
453 def copyfile(src, dest):
453 def copyfile(src, dest):
454 "copy a file, preserving mode and atime/mtime"
454 "copy a file, preserving mode and atime/mtime"
455 if os.path.islink(src):
455 if os.path.islink(src):
456 try:
456 try:
457 os.unlink(dest)
457 os.unlink(dest)
458 except:
458 except:
459 pass
459 pass
460 os.symlink(os.readlink(src), dest)
460 os.symlink(os.readlink(src), dest)
461 else:
461 else:
462 try:
462 try:
463 shutil.copyfile(src, dest)
463 shutil.copyfile(src, dest)
464 shutil.copymode(src, dest)
464 shutil.copymode(src, dest)
465 except shutil.Error, inst:
465 except shutil.Error, inst:
466 raise Abort(str(inst))
466 raise Abort(str(inst))
467
467
468 def copyfiles(src, dst, hardlink=None):
468 def copyfiles(src, dst, hardlink=None):
469 """Copy a directory tree using hardlinks if possible"""
469 """Copy a directory tree using hardlinks if possible"""
470
470
471 if hardlink is None:
471 if hardlink is None:
472 hardlink = (os.stat(src).st_dev ==
472 hardlink = (os.stat(src).st_dev ==
473 os.stat(os.path.dirname(dst)).st_dev)
473 os.stat(os.path.dirname(dst)).st_dev)
474
474
475 num = 0
475 num = 0
476 if os.path.isdir(src):
476 if os.path.isdir(src):
477 os.mkdir(dst)
477 os.mkdir(dst)
478 for name, kind in osutil.listdir(src):
478 for name, kind in osutil.listdir(src):
479 srcname = os.path.join(src, name)
479 srcname = os.path.join(src, name)
480 dstname = os.path.join(dst, name)
480 dstname = os.path.join(dst, name)
481 hardlink, n = copyfiles(srcname, dstname, hardlink)
481 hardlink, n = copyfiles(srcname, dstname, hardlink)
482 num += n
482 num += n
483 else:
483 else:
484 if hardlink:
484 if hardlink:
485 try:
485 try:
486 os_link(src, dst)
486 os_link(src, dst)
487 except (IOError, OSError):
487 except (IOError, OSError):
488 hardlink = False
488 hardlink = False
489 shutil.copy(src, dst)
489 shutil.copy(src, dst)
490 else:
490 else:
491 shutil.copy(src, dst)
491 shutil.copy(src, dst)
492 num += 1
492 num += 1
493
493
494 return hardlink, num
494 return hardlink, num
495
495
496 _windows_reserved_filenames = '''con prn aux nul
496 _windows_reserved_filenames = '''con prn aux nul
497 com1 com2 com3 com4 com5 com6 com7 com8 com9
497 com1 com2 com3 com4 com5 com6 com7 com8 com9
498 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
498 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
499 _windows_reserved_chars = ':*?"<>|'
499 _windows_reserved_chars = ':*?"<>|'
500 def checkwinfilename(path):
500 def checkwinfilename(path):
501 '''Check that the base-relative path is a valid filename on Windows.
501 '''Check that the base-relative path is a valid filename on Windows.
502 Returns None if the path is ok, or a UI string describing the problem.
502 Returns None if the path is ok, or a UI string describing the problem.
503
503
504 >>> checkwinfilename("just/a/normal/path")
504 >>> checkwinfilename("just/a/normal/path")
505 >>> checkwinfilename("foo/bar/con.xml")
505 >>> checkwinfilename("foo/bar/con.xml")
506 "filename contains 'con', which is reserved on Windows"
506 "filename contains 'con', which is reserved on Windows"
507 >>> checkwinfilename("foo/con.xml/bar")
507 >>> checkwinfilename("foo/con.xml/bar")
508 "filename contains 'con', which is reserved on Windows"
508 "filename contains 'con', which is reserved on Windows"
509 >>> checkwinfilename("foo/bar/xml.con")
509 >>> checkwinfilename("foo/bar/xml.con")
510 >>> checkwinfilename("foo/bar/AUX/bla.txt")
510 >>> checkwinfilename("foo/bar/AUX/bla.txt")
511 "filename contains 'AUX', which is reserved on Windows"
511 "filename contains 'AUX', which is reserved on Windows"
512 >>> checkwinfilename("foo/bar/bla:.txt")
512 >>> checkwinfilename("foo/bar/bla:.txt")
513 "filename contains ':', which is reserved on Windows"
513 "filename contains ':', which is reserved on Windows"
514 >>> checkwinfilename("foo/bar/b\07la.txt")
514 >>> checkwinfilename("foo/bar/b\07la.txt")
515 "filename contains '\\x07', which is invalid on Windows"
515 "filename contains '\\x07', which is invalid on Windows"
516 >>> checkwinfilename("foo/bar/bla ")
516 >>> checkwinfilename("foo/bar/bla ")
517 "filename ends with ' ', which is not allowed on Windows"
517 "filename ends with ' ', which is not allowed on Windows"
518 '''
518 '''
519 for n in path.replace('\\', '/').split('/'):
519 for n in path.replace('\\', '/').split('/'):
520 if not n:
520 if not n:
521 continue
521 continue
522 for c in n:
522 for c in n:
523 if c in _windows_reserved_chars:
523 if c in _windows_reserved_chars:
524 return _("filename contains '%s', which is reserved "
524 return _("filename contains '%s', which is reserved "
525 "on Windows") % c
525 "on Windows") % c
526 if ord(c) <= 31:
526 if ord(c) <= 31:
527 return _("filename contains '%s', which is invalid "
527 return _("filename contains '%s', which is invalid "
528 "on Windows") % c
528 "on Windows") % c
529 base = n.split('.')[0]
529 base = n.split('.')[0]
530 if base and base.lower() in _windows_reserved_filenames:
530 if base and base.lower() in _windows_reserved_filenames:
531 return _("filename contains '%s', which is reserved "
531 return _("filename contains '%s', which is reserved "
532 "on Windows") % base
532 "on Windows") % base
533 t = n[-1]
533 t = n[-1]
534 if t in '. ':
534 if t in '. ':
535 return _("filename ends with '%s', which is not allowed "
535 return _("filename ends with '%s', which is not allowed "
536 "on Windows") % t
536 "on Windows") % t
537
537
538 class path_auditor(object):
538 class path_auditor(object):
539 '''ensure that a filesystem path contains no banned components.
539 '''ensure that a filesystem path contains no banned components.
540 the following properties of a path are checked:
540 the following properties of a path are checked:
541
541
542 - ends with a directory separator
542 - ends with a directory separator
543 - under top-level .hg
543 - under top-level .hg
544 - starts at the root of a windows drive
544 - starts at the root of a windows drive
545 - contains ".."
545 - contains ".."
546 - traverses a symlink (e.g. a/symlink_here/b)
546 - traverses a symlink (e.g. a/symlink_here/b)
547 - inside a nested repository (a callback can be used to approve
547 - inside a nested repository (a callback can be used to approve
548 some nested repositories, e.g., subrepositories)
548 some nested repositories, e.g., subrepositories)
549 '''
549 '''
550
550
551 def __init__(self, root, callback=None):
551 def __init__(self, root, callback=None):
552 self.audited = set()
552 self.audited = set()
553 self.auditeddir = set()
553 self.auditeddir = set()
554 self.root = root
554 self.root = root
555 self.callback = callback
555 self.callback = callback
556
556
557 def __call__(self, path):
557 def __call__(self, path):
558 if path in self.audited:
558 if path in self.audited:
559 return
559 return
560 # AIX ignores "/" at end of path, others raise EISDIR.
560 # AIX ignores "/" at end of path, others raise EISDIR.
561 if endswithsep(path):
561 if endswithsep(path):
562 raise Abort(_("path ends in directory separator: %s") % path)
562 raise Abort(_("path ends in directory separator: %s") % path)
563 normpath = os.path.normcase(path)
563 normpath = os.path.normcase(path)
564 parts = splitpath(normpath)
564 parts = splitpath(normpath)
565 if (os.path.splitdrive(path)[0]
565 if (os.path.splitdrive(path)[0]
566 or parts[0].lower() in ('.hg', '.hg.', '')
566 or parts[0].lower() in ('.hg', '.hg.', '')
567 or os.pardir in parts):
567 or os.pardir in parts):
568 raise Abort(_("path contains illegal component: %s") % path)
568 raise Abort(_("path contains illegal component: %s") % path)
569 if '.hg' in path.lower():
569 if '.hg' in path.lower():
570 lparts = [p.lower() for p in parts]
570 lparts = [p.lower() for p in parts]
571 for p in '.hg', '.hg.':
571 for p in '.hg', '.hg.':
572 if p in lparts[1:]:
572 if p in lparts[1:]:
573 pos = lparts.index(p)
573 pos = lparts.index(p)
574 base = os.path.join(*parts[:pos])
574 base = os.path.join(*parts[:pos])
575 raise Abort(_('path %r is inside nested repo %r')
575 raise Abort(_('path %r is inside nested repo %r')
576 % (path, base))
576 % (path, base))
577 def check(prefix):
577 def check(prefix):
578 curpath = os.path.join(self.root, prefix)
578 curpath = os.path.join(self.root, prefix)
579 try:
579 try:
580 st = os.lstat(curpath)
580 st = os.lstat(curpath)
581 except OSError, err:
581 except OSError, err:
582 # EINVAL can be raised as invalid path syntax under win32.
582 # EINVAL can be raised as invalid path syntax under win32.
583 # They must be ignored so that patterns can be checked too.
583 # They must be ignored so that patterns can be checked too.
584 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
584 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
585 raise
585 raise
586 else:
586 else:
587 if stat.S_ISLNK(st.st_mode):
587 if stat.S_ISLNK(st.st_mode):
588 raise Abort(_('path %r traverses symbolic link %r') %
588 raise Abort(_('path %r traverses symbolic link %r') %
589 (path, prefix))
589 (path, prefix))
590 elif (stat.S_ISDIR(st.st_mode) and
590 elif (stat.S_ISDIR(st.st_mode) and
591 os.path.isdir(os.path.join(curpath, '.hg'))):
591 os.path.isdir(os.path.join(curpath, '.hg'))):
592 if not self.callback or not self.callback(curpath):
592 if not self.callback or not self.callback(curpath):
593 raise Abort(_('path %r is inside nested repo %r') %
593 raise Abort(_('path %r is inside nested repo %r') %
594 (path, prefix))
594 (path, prefix))
595 parts.pop()
595 parts.pop()
596 prefixes = []
596 prefixes = []
597 while parts:
597 while parts:
598 prefix = os.sep.join(parts)
598 prefix = os.sep.join(parts)
599 if prefix in self.auditeddir:
599 if prefix in self.auditeddir:
600 break
600 break
601 check(prefix)
601 check(prefix)
602 prefixes.append(prefix)
602 prefixes.append(prefix)
603 parts.pop()
603 parts.pop()
604
604
605 r = checkosfilename(path)
606 if r:
607 raise Abort("%s: %s" % (r, path))
608 self.audited.add(path)
605 self.audited.add(path)
609 # only add prefixes to the cache after checking everything: we don't
606 # only add prefixes to the cache after checking everything: we don't
610 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
607 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
611 self.auditeddir.update(prefixes)
608 self.auditeddir.update(prefixes)
612
609
613 def lookup_reg(key, name=None, scope=None):
610 def lookup_reg(key, name=None, scope=None):
614 return None
611 return None
615
612
616 def hidewindow():
613 def hidewindow():
617 """Hide current shell window.
614 """Hide current shell window.
618
615
619 Used to hide the window opened when starting asynchronous
616 Used to hide the window opened when starting asynchronous
620 child process under Windows, unneeded on other systems.
617 child process under Windows, unneeded on other systems.
621 """
618 """
622 pass
619 pass
623
620
624 if os.name == 'nt':
621 if os.name == 'nt':
625 checkosfilename = checkwinfilename
622 checkosfilename = checkwinfilename
626 from windows import *
623 from windows import *
627 else:
624 else:
628 from posix import *
625 from posix import *
629
626
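Note on the platform selection just above: on Windows, checkosfilename is bound to checkwinfilename; on other platforms it is expected to come from the posix module via the star import (presumably a no-op there, though that module is not part of this diff). The snippet below merely replays the return contract documented by the checkwinfilename doctests earlier in this file; the expected values in the comments mirror those doctests.

from mercurial.util import checkwinfilename

print checkwinfilename("just/a/normal/path")  # None: nothing to complain about
print checkwinfilename("foo/bar/con.xml")     # filename contains 'con', which is reserved on Windows
print checkwinfilename("foo/bar/bla:.txt")    # filename contains ':', which is reserved on Windows
print checkwinfilename("foo/bar/bla ")        # filename ends with ' ', which is not allowed on Windows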
630 def makelock(info, pathname):
627 def makelock(info, pathname):
631 try:
628 try:
632 return os.symlink(info, pathname)
629 return os.symlink(info, pathname)
633 except OSError, why:
630 except OSError, why:
634 if why.errno == errno.EEXIST:
631 if why.errno == errno.EEXIST:
635 raise
632 raise
636 except AttributeError: # no symlink in os
633 except AttributeError: # no symlink in os
637 pass
634 pass
638
635
639 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
636 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
640 os.write(ld, info)
637 os.write(ld, info)
641 os.close(ld)
638 os.close(ld)
642
639
643 def readlock(pathname):
640 def readlock(pathname):
644 try:
641 try:
645 return os.readlink(pathname)
642 return os.readlink(pathname)
646 except OSError, why:
643 except OSError, why:
647 if why.errno not in (errno.EINVAL, errno.ENOSYS):
644 if why.errno not in (errno.EINVAL, errno.ENOSYS):
648 raise
645 raise
649 except AttributeError: # no symlink in os
646 except AttributeError: # no symlink in os
650 pass
647 pass
651 fp = posixfile(pathname)
648 fp = posixfile(pathname)
652 r = fp.read()
649 r = fp.read()
653 fp.close()
650 fp.close()
654 return r
651 return r
655
652
656 def fstat(fp):
653 def fstat(fp):
657 '''stat file object that may not have fileno method.'''
654 '''stat file object that may not have fileno method.'''
658 try:
655 try:
659 return os.fstat(fp.fileno())
656 return os.fstat(fp.fileno())
660 except AttributeError:
657 except AttributeError:
661 return os.stat(fp.name)
658 return os.stat(fp.name)
662
659
663 # File system features
660 # File system features
664
661
665 def checkcase(path):
662 def checkcase(path):
666 """
663 """
667 Check whether the given path is on a case-sensitive filesystem
664 Check whether the given path is on a case-sensitive filesystem
668
665
669 Requires a path (like /foo/.hg) ending with a foldable final
666 Requires a path (like /foo/.hg) ending with a foldable final
670 directory component.
667 directory component.
671 """
668 """
672 s1 = os.stat(path)
669 s1 = os.stat(path)
673 d, b = os.path.split(path)
670 d, b = os.path.split(path)
674 p2 = os.path.join(d, b.upper())
671 p2 = os.path.join(d, b.upper())
675 if path == p2:
672 if path == p2:
676 p2 = os.path.join(d, b.lower())
673 p2 = os.path.join(d, b.lower())
677 try:
674 try:
678 s2 = os.stat(p2)
675 s2 = os.stat(p2)
679 if s2 == s1:
676 if s2 == s1:
680 return False
677 return False
681 return True
678 return True
682 except:
679 except:
683 return True
680 return True
684
681
685 _fspathcache = {}
682 _fspathcache = {}
686 def fspath(name, root):
683 def fspath(name, root):
687 '''Get name in the case stored in the filesystem
684 '''Get name in the case stored in the filesystem
688
685
689 The name is either relative to root, or it is an absolute path starting
686 The name is either relative to root, or it is an absolute path starting
690 with root. Note that this function is unnecessary, and should not be
687 with root. Note that this function is unnecessary, and should not be
691 called, for case-sensitive filesystems (simply because it's expensive).
688 called, for case-sensitive filesystems (simply because it's expensive).
692 '''
689 '''
693 # If name is absolute, make it relative
690 # If name is absolute, make it relative
694 if name.lower().startswith(root.lower()):
691 if name.lower().startswith(root.lower()):
695 l = len(root)
692 l = len(root)
696 if name[l] == os.sep or name[l] == os.altsep:
693 if name[l] == os.sep or name[l] == os.altsep:
697 l = l + 1
694 l = l + 1
698 name = name[l:]
695 name = name[l:]
699
696
700 if not os.path.lexists(os.path.join(root, name)):
697 if not os.path.lexists(os.path.join(root, name)):
701 return None
698 return None
702
699
703 seps = os.sep
700 seps = os.sep
704 if os.altsep:
701 if os.altsep:
705 seps = seps + os.altsep
702 seps = seps + os.altsep
706 # Protect backslashes. This gets silly very quickly.
703 # Protect backslashes. This gets silly very quickly.
707 seps.replace('\\','\\\\')
704 seps.replace('\\','\\\\')
708 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
705 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
709 dir = os.path.normcase(os.path.normpath(root))
706 dir = os.path.normcase(os.path.normpath(root))
710 result = []
707 result = []
711 for part, sep in pattern.findall(name):
708 for part, sep in pattern.findall(name):
712 if sep:
709 if sep:
713 result.append(sep)
710 result.append(sep)
714 continue
711 continue
715
712
716 if dir not in _fspathcache:
713 if dir not in _fspathcache:
717 _fspathcache[dir] = os.listdir(dir)
714 _fspathcache[dir] = os.listdir(dir)
718 contents = _fspathcache[dir]
715 contents = _fspathcache[dir]
719
716
720 lpart = part.lower()
717 lpart = part.lower()
721 lenp = len(part)
718 lenp = len(part)
722 for n in contents:
719 for n in contents:
723 if lenp == len(n) and n.lower() == lpart:
720 if lenp == len(n) and n.lower() == lpart:
724 result.append(n)
721 result.append(n)
725 break
722 break
726 else:
723 else:
727 # Cannot happen, as the file exists!
724 # Cannot happen, as the file exists!
728 result.append(part)
725 result.append(part)
729 dir = os.path.join(dir, lpart)
726 dir = os.path.join(dir, lpart)
730
727
731 return ''.join(result)
728 return ''.join(result)
732
729
733 def checknlink(testfile):
730 def checknlink(testfile):
734 '''check whether hardlink count reporting works properly'''
731 '''check whether hardlink count reporting works properly'''
735
732
736 # testfile may be open, so we need a separate file for checking to
733 # testfile may be open, so we need a separate file for checking to
737 # work around issue2543 (or testfile may get lost on Samba shares)
734 # work around issue2543 (or testfile may get lost on Samba shares)
738 f1 = testfile + ".hgtmp1"
735 f1 = testfile + ".hgtmp1"
739 if os.path.lexists(f1):
736 if os.path.lexists(f1):
740 return False
737 return False
741 try:
738 try:
742 posixfile(f1, 'w').close()
739 posixfile(f1, 'w').close()
743 except IOError:
740 except IOError:
744 return False
741 return False
745
742
746 f2 = testfile + ".hgtmp2"
743 f2 = testfile + ".hgtmp2"
747 fd = None
744 fd = None
748 try:
745 try:
749 try:
746 try:
750 os_link(f1, f2)
747 os_link(f1, f2)
751 except OSError:
748 except OSError:
752 return False
749 return False
753
750
754 # nlinks() may behave differently for files on Windows shares if
751 # nlinks() may behave differently for files on Windows shares if
755 # the file is open.
752 # the file is open.
756 fd = posixfile(f2)
753 fd = posixfile(f2)
757 return nlinks(f2) > 1
754 return nlinks(f2) > 1
758 finally:
755 finally:
759 if fd is not None:
756 if fd is not None:
760 fd.close()
757 fd.close()
761 for f in (f1, f2):
758 for f in (f1, f2):
762 try:
759 try:
763 os.unlink(f)
760 os.unlink(f)
764 except OSError:
761 except OSError:
765 pass
762 pass
766
763
767 return False
764 return False
768
765
769 def endswithsep(path):
766 def endswithsep(path):
770 '''Check path ends with os.sep or os.altsep.'''
767 '''Check path ends with os.sep or os.altsep.'''
771 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
768 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
772
769
773 def splitpath(path):
770 def splitpath(path):
774 '''Split path by os.sep.
771 '''Split path by os.sep.
775 Note that this function does not use os.altsep because this is
772 Note that this function does not use os.altsep because this is
776 an alternative to a simple "xxx.split(os.sep)".
773 an alternative to a simple "xxx.split(os.sep)".
777 It is recommended to use os.path.normpath() before using this
774 It is recommended to use os.path.normpath() before using this
778 function if needed.'''
775 function if needed.'''
779 return path.split(os.sep)
776 return path.split(os.sep)
780
777
781 def gui():
778 def gui():
782 '''Are we running in a GUI?'''
779 '''Are we running in a GUI?'''
783 if sys.platform == 'darwin':
780 if sys.platform == 'darwin':
784 if 'SSH_CONNECTION' in os.environ:
781 if 'SSH_CONNECTION' in os.environ:
785 # handle SSH access to a box where the user is logged in
782 # handle SSH access to a box where the user is logged in
786 return False
783 return False
787 elif getattr(osutil, 'isgui', None):
784 elif getattr(osutil, 'isgui', None):
788 # check if a CoreGraphics session is available
785 # check if a CoreGraphics session is available
789 return osutil.isgui()
786 return osutil.isgui()
790 else:
787 else:
791 # pure build; use a safe default
788 # pure build; use a safe default
792 return True
789 return True
793 else:
790 else:
794 return os.name == "nt" or os.environ.get("DISPLAY")
791 return os.name == "nt" or os.environ.get("DISPLAY")
795
792
796 def mktempcopy(name, emptyok=False, createmode=None):
793 def mktempcopy(name, emptyok=False, createmode=None):
797 """Create a temporary file with the same contents from name
794 """Create a temporary file with the same contents from name
798
795
799 The permission bits are copied from the original file.
796 The permission bits are copied from the original file.
800
797
801 If the temporary file is going to be truncated immediately, you
798 If the temporary file is going to be truncated immediately, you
802 can use emptyok=True as an optimization.
799 can use emptyok=True as an optimization.
803
800
804 Returns the name of the temporary file.
801 Returns the name of the temporary file.
805 """
802 """
806 d, fn = os.path.split(name)
803 d, fn = os.path.split(name)
807 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
804 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
808 os.close(fd)
805 os.close(fd)
809 # Temporary files are created with mode 0600, which is usually not
806 # Temporary files are created with mode 0600, which is usually not
810 # what we want. If the original file already exists, just copy
807 # what we want. If the original file already exists, just copy
811 # its mode. Otherwise, manually obey umask.
808 # its mode. Otherwise, manually obey umask.
812 try:
809 try:
813 st_mode = os.lstat(name).st_mode & 0777
810 st_mode = os.lstat(name).st_mode & 0777
814 except OSError, inst:
811 except OSError, inst:
815 if inst.errno != errno.ENOENT:
812 if inst.errno != errno.ENOENT:
816 raise
813 raise
817 st_mode = createmode
814 st_mode = createmode
818 if st_mode is None:
815 if st_mode is None:
819 st_mode = ~umask
816 st_mode = ~umask
820 st_mode &= 0666
817 st_mode &= 0666
821 os.chmod(temp, st_mode)
818 os.chmod(temp, st_mode)
822 if emptyok:
819 if emptyok:
823 return temp
820 return temp
824 try:
821 try:
825 try:
822 try:
826 ifp = posixfile(name, "rb")
823 ifp = posixfile(name, "rb")
827 except IOError, inst:
824 except IOError, inst:
828 if inst.errno == errno.ENOENT:
825 if inst.errno == errno.ENOENT:
829 return temp
826 return temp
830 if not getattr(inst, 'filename', None):
827 if not getattr(inst, 'filename', None):
831 inst.filename = name
828 inst.filename = name
832 raise
829 raise
833 ofp = posixfile(temp, "wb")
830 ofp = posixfile(temp, "wb")
834 for chunk in filechunkiter(ifp):
831 for chunk in filechunkiter(ifp):
835 ofp.write(chunk)
832 ofp.write(chunk)
836 ifp.close()
833 ifp.close()
837 ofp.close()
834 ofp.close()
838 except:
835 except:
839 try: os.unlink(temp)
836 try: os.unlink(temp)
840 except: pass
837 except: pass
841 raise
838 raise
842 return temp
839 return temp
843
840
844 class atomictempfile(object):
841 class atomictempfile(object):
845 """file-like object that atomically updates a file
842 """file-like object that atomically updates a file
846
843
847 All writes will be redirected to a temporary copy of the original
844 All writes will be redirected to a temporary copy of the original
848 file. When rename is called, the copy is renamed to the original
845 file. When rename is called, the copy is renamed to the original
849 name, making the changes visible.
846 name, making the changes visible.
850 """
847 """
851 def __init__(self, name, mode='w+b', createmode=None):
848 def __init__(self, name, mode='w+b', createmode=None):
852 self.__name = name
849 self.__name = name
853 self._fp = None
850 self._fp = None
854 self.temp = mktempcopy(name, emptyok=('w' in mode),
851 self.temp = mktempcopy(name, emptyok=('w' in mode),
855 createmode=createmode)
852 createmode=createmode)
856 self._fp = posixfile(self.temp, mode)
853 self._fp = posixfile(self.temp, mode)
857
854
858 def __getattr__(self, name):
855 def __getattr__(self, name):
859 return getattr(self._fp, name)
856 return getattr(self._fp, name)
860
857
861 def rename(self):
858 def rename(self):
862 if not self._fp.closed:
859 if not self._fp.closed:
863 self._fp.close()
860 self._fp.close()
864 rename(self.temp, localpath(self.__name))
861 rename(self.temp, localpath(self.__name))
865
862
866 def close(self):
863 def close(self):
867 if not self._fp:
864 if not self._fp:
868 return
865 return
869 if not self._fp.closed:
866 if not self._fp.closed:
870 try:
867 try:
871 os.unlink(self.temp)
868 os.unlink(self.temp)
872 except: pass
869 except: pass
873 self._fp.close()
870 self._fp.close()
874
871
875 def __del__(self):
872 def __del__(self):
876 self.close()
873 self.close()
877
874
878 def makedirs(name, mode=None):
875 def makedirs(name, mode=None):
879 """recursive directory creation with parent mode inheritance"""
876 """recursive directory creation with parent mode inheritance"""
880 parent = os.path.abspath(os.path.dirname(name))
877 parent = os.path.abspath(os.path.dirname(name))
881 try:
878 try:
882 os.mkdir(name)
879 os.mkdir(name)
883 if mode is not None:
880 if mode is not None:
884 os.chmod(name, mode)
881 os.chmod(name, mode)
885 return
882 return
886 except OSError, err:
883 except OSError, err:
887 if err.errno == errno.EEXIST:
884 if err.errno == errno.EEXIST:
888 return
885 return
889 if not name or parent == name or err.errno != errno.ENOENT:
886 if not name or parent == name or err.errno != errno.ENOENT:
890 raise
887 raise
891 makedirs(parent, mode)
888 makedirs(parent, mode)
892 makedirs(name, mode)
889 makedirs(name, mode)
893
890
894 class opener(object):
891 class opener(object):
895 """Open files relative to a base directory
892 """Open files relative to a base directory
896
893
897 This class is used to hide the details of COW semantics and
894 This class is used to hide the details of COW semantics and
898 remote file access from higher level code.
895 remote file access from higher level code.
899 """
896 """
900 def __init__(self, base, audit=True):
897 def __init__(self, base, audit=True):
901 self.base = base
898 self.base = base
902 if audit:
899 if audit:
903 self.auditor = path_auditor(base)
900 self.auditor = path_auditor(base)
904 else:
901 else:
905 self.auditor = always
902 self.auditor = always
906 self.createmode = None
903 self.createmode = None
907 self._trustnlink = None
904 self._trustnlink = None
908
905
909 @propertycache
906 @propertycache
910 def _can_symlink(self):
907 def _can_symlink(self):
911 return checklink(self.base)
908 return checklink(self.base)
912
909
913 def _fixfilemode(self, name):
910 def _fixfilemode(self, name):
914 if self.createmode is None:
911 if self.createmode is None:
915 return
912 return
916 os.chmod(name, self.createmode & 0666)
913 os.chmod(name, self.createmode & 0666)
917
914
918 def __call__(self, path, mode="r", text=False, atomictemp=False):
915 def __call__(self, path, mode="r", text=False, atomictemp=False):
916 r = checkosfilename(path)
917 if r:
918 raise Abort("%s: %s" % (r, path))
919 self.auditor(path)
919 self.auditor(path)
920 f = os.path.join(self.base, path)
920 f = os.path.join(self.base, path)
921
921
922 if not text and "b" not in mode:
922 if not text and "b" not in mode:
923 mode += "b" # for that other OS
923 mode += "b" # for that other OS
924
924
925 nlink = -1
925 nlink = -1
926 dirname, basename = os.path.split(f)
926 dirname, basename = os.path.split(f)
927 # If basename is empty, then the path is malformed because it points
927 # If basename is empty, then the path is malformed because it points
928 # to a directory. Let the posixfile() call below raise IOError.
928 # to a directory. Let the posixfile() call below raise IOError.
929 if basename and mode not in ('r', 'rb'):
929 if basename and mode not in ('r', 'rb'):
930 if atomictemp:
930 if atomictemp:
931 if not os.path.isdir(dirname):
931 if not os.path.isdir(dirname):
932 makedirs(dirname, self.createmode)
932 makedirs(dirname, self.createmode)
933 return atomictempfile(f, mode, self.createmode)
933 return atomictempfile(f, mode, self.createmode)
934 try:
934 try:
935 if 'w' in mode:
935 if 'w' in mode:
936 unlink(f)
936 unlink(f)
937 nlink = 0
937 nlink = 0
938 else:
938 else:
939 # nlinks() may behave differently for files on Windows
939 # nlinks() may behave differently for files on Windows
940 # shares if the file is open.
940 # shares if the file is open.
941 fd = posixfile(f)
941 fd = posixfile(f)
942 nlink = nlinks(f)
942 nlink = nlinks(f)
943 if nlink < 1:
943 if nlink < 1:
944 nlink = 2 # force mktempcopy (issue1922)
944 nlink = 2 # force mktempcopy (issue1922)
945 fd.close()
945 fd.close()
946 except (OSError, IOError), e:
946 except (OSError, IOError), e:
947 if e.errno != errno.ENOENT:
947 if e.errno != errno.ENOENT:
948 raise
948 raise
949 nlink = 0
949 nlink = 0
950 if not os.path.isdir(dirname):
950 if not os.path.isdir(dirname):
951 makedirs(dirname, self.createmode)
951 makedirs(dirname, self.createmode)
952 if nlink > 0:
952 if nlink > 0:
953 if self._trustnlink is None:
953 if self._trustnlink is None:
954 self._trustnlink = nlink > 1 or checknlink(f)
954 self._trustnlink = nlink > 1 or checknlink(f)
955 if nlink > 1 or not self._trustnlink:
955 if nlink > 1 or not self._trustnlink:
956 rename(mktempcopy(f), f)
956 rename(mktempcopy(f), f)
957 fp = posixfile(f, mode)
957 fp = posixfile(f, mode)
958 if nlink == 0:
958 if nlink == 0:
959 self._fixfilemode(f)
959 self._fixfilemode(f)
960 return fp
960 return fp
961
961
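With the three lines added at the top of __call__ (new lines 916-918), every path opened through an opener is validated by checkosfilename before it is audited or opened; path_auditor.__call__ (earlier in this file) no longer performs that check itself. A minimal usage sketch follows, assuming mercurial is importable, that the POSIX checkosfilename imposes no restrictions, and with made-up base and file names; the commented Windows case assumes checkosfilename is checkwinfilename as selected above.

from mercurial import util

op = util.opener('/tmp/sandbox')   # audit=True by default
f = op('docs/notes.txt', 'w')      # checkosfilename, then audit, then open
f.write('ok\n')
f.close()

# On a Windows host, a reserved basename is now rejected by opener.__call__
# itself rather than by the path auditor:
#
#     op('foo/aux.txt', 'w')
#     Abort: filename contains 'aux', which is reserved on Windows: foo/aux.txt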
962 def symlink(self, src, dst):
962 def symlink(self, src, dst):
963 self.auditor(dst)
963 self.auditor(dst)
964 linkname = os.path.join(self.base, dst)
964 linkname = os.path.join(self.base, dst)
965 try:
965 try:
966 os.unlink(linkname)
966 os.unlink(linkname)
967 except OSError:
967 except OSError:
968 pass
968 pass
969
969
970 dirname = os.path.dirname(linkname)
970 dirname = os.path.dirname(linkname)
971 if not os.path.exists(dirname):
971 if not os.path.exists(dirname):
972 makedirs(dirname, self.createmode)
972 makedirs(dirname, self.createmode)
973
973
974 if self._can_symlink:
974 if self._can_symlink:
975 try:
975 try:
976 os.symlink(src, linkname)
976 os.symlink(src, linkname)
977 except OSError, err:
977 except OSError, err:
978 raise OSError(err.errno, _('could not symlink to %r: %s') %
978 raise OSError(err.errno, _('could not symlink to %r: %s') %
979 (src, err.strerror), linkname)
979 (src, err.strerror), linkname)
980 else:
980 else:
981 f = self(dst, "w")
981 f = self(dst, "w")
982 f.write(src)
982 f.write(src)
983 f.close()
983 f.close()
984 self._fixfilemode(dst)
984 self._fixfilemode(dst)
985
985
class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        left = l
        buf = ''
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            chunk = queue.pop(0)
            left -= len(chunk)
            if left < 0:
                queue.insert(0, chunk[left:])
                buf += chunk[:left]
            else:
                buf += chunk

        return buf

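# Illustrative usage sketch (not part of the original module): chunkbuffer
# re-chunks an iterator of arbitrarily sized strings so callers can read
# exact byte counts:
#
#   cb = chunkbuffer(iter(['abc', 'defgh', 'ij']))
#   cb.read(4)   # -> 'abcd'
#   cb.read(10)  # -> 'efghij' (less than requested; the iterator ran dry)
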
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

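# Illustrative usage sketch (not part of the original module): copy a file in
# bounded chunks without reading it into memory at once; the file names are
# hypothetical:
#
#   src = open('/tmp/in.bin', 'rb')
#   dst = open('/tmp/out.bin', 'wb')
#   for chunk in filechunkiter(src, size=8192):
#       dst.write(chunk)
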
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    t = time.mktime(lt)
    if t < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % t, hint=hint)
    return t, tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. "%1" and "%2" in the format string
    are replaced with the signed hours and the minutes of that offset."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

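# Illustrative examples (not part of the original module), using explicit
# (unixtime, offset) tuples so the output does not depend on the clock;
# the weekday/month names assume the C locale:
#
#   datestr((0, 0))                            # -> 'Thu Jan 01 00:00:00 1970 +0000'
#   datestr((0, -3600), format='%H:%M %1%2')   # -> '01:00 +0100'
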
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        nowmap = {}
        for part in ("d", "mb", "yY", "HI", "M", "S"):
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

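# Illustrative examples (not part of the original module):
#
#   parsedate('1234567890 0')      # -> (1234567890, 0)
#   parsedate('')                  # -> (0, 0)
#   parsedate((1234567890, 3600))  # tuple is passed through unchanged
#   parsedate('not a date')        # raises Abort for unparseable input
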
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in ("31", "30", "29"):
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort: # e.g. an impossible day of month such as Feb 31
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()

    if not date:
        raise Abort(_("dates cannot consist entirely of whitespace"))
    elif date[0] == "<":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '<DATE'"))
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        if not date[1:]:
            raise Abort(_("invalid day spec, use '>DATE'"))
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        if days < 0:
            raise Abort(_("%s must be nonnegative (see 'hg help dates')")
                        % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

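# Illustrative examples (not part of the original module):
#
#   shortuser('John Doe <john.doe@example.com>')  # -> 'john'
#   email('John Doe <john.doe@example.com>')      # -> 'john.doe@example.com'
#   email('john.doe@example.com')                 # -> 'john.doe@example.com'
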
def _ellipsis(text, maxlength):
    if len(text) <= maxlength:
        return text, False
    else:
        return "%s..." % (text[:maxlength - 3]), True

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # use unicode not to split at intermediate multi-byte sequence
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if not truncated:
            return text
        return utext.encode(encoding.encoding)
    except (UnicodeDecodeError, UnicodeEncodeError):
        return _ellipsis(text, maxlength)[0]

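# Illustrative examples (not part of the original module), assuming an
# ASCII-compatible encoding.encoding:
#
#   ellipsis('0123456789', maxlength=5)  # -> '01...'
#   ellipsis('short', maxlength=10)      # -> 'short'
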
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

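# Illustrative usage sketch (not part of the original module); the path is
# hypothetical:
#
#   for repo in walkrepos('/srv/hg', followsym=True):
#       print repo   # each repository root found under /srv/hg
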
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

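# Illustrative sketch (not part of the original module); the paths are
# hypothetical. With HGRCPATH set before the first call, e.g.
# HGRCPATH='/etc/mercurial/hgrc.d:/home/user/.hgrc', the directory entry is
# expanded to the *.rc files it contains and the plain file entry is kept as
# is; with HGRCPATH unset, the OS-specific default from os_rcpath() is used.
# Note that the result is cached in the module-level _rcpath.
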
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
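
# Illustrative examples (not part of the original module):
#
#   bytecount(512)        # -> '512 bytes'
#   bytecount(1 << 20)    # -> '1.00 MB'
#   bytecount(100 << 20)  # -> '100 MB'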

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for double-width characters.

        Some Asian characters use two terminal columns instead of one.
        A good example of this behavior can be seen with u'\u65e5\u672c',
        the two Japanese characters for "Japan":
        len() returns 2, but when printed to a terminal, they eat 4 columns.

        (Note that this has nothing to do whatsoever with unicode
        representation, or encoding of the underlying string)
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

        def _cutdown(self, str, space_left):
            l = 0
            ucstr = unicode(str, encoding.encoding)
            colwidth = unicodedata.east_asian_width
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
                if space_left < l:
                    return (ucstr[:i].encode(encoding.encoding),
                            ucstr[i:].encode(encoding.encoding))
            return str, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)

def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)

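# Illustrative usage sketch (not part of the original module): wrap help text
# to 30 columns with a two-space first-line indent and a four-space hanging
# indent; the exact line breaks are delegated to MBTextWrapper:
#
#   print wrap('a fairly long help string that needs wrapping', 30,
#              initindent='  ', hangindent='    ')
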
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

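# Illustrative examples (not part of the original module):
#
#   interpolate('%', {'foo': 'bar'}, 'say %foo')        # -> 'say bar'
#   interpolate(r'\$', {'foo': 'bar'}, 'say $foo $$x',
#               escape_prefix=True)                     # -> 'say bar $x'
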
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)

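# Illustrative examples (not part of the original module):
#
#   getport(8080)    # -> 8080
#   getport('8080')  # -> 8080
#   getport('http')  # -> 80 on systems whose services database defines http
#   getport('no-such-service')  # raises Abort
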
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
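
# Illustrative examples (not part of the original module):
#
#   parsebool('on')     # -> True
#   parsebool('Never')  # -> False
#   parsebool('maybe')  # -> None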