opener: force copy on 'a'ppend if nlinks() returns 0 (issue1922)...
Adrian Buehlmann
r13305:035684c6 (stable)
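The change itself is small: the two lines added at new lines 916-917 of util.py make the opener treat a hardlink count below 1 as if the file were hardlinked, so an append still goes through the copy-and-rename path (the commit message says nlinks() can return 0 on some filesystems). The sketch below only illustrates that copy-on-append guard using the plain standard library; the function name and structure are hypothetical and are not Mercurial's opener API.

import os
import shutil
import tempfile

def append_with_copy(path):
    # Hypothetical helper illustrating the guard added in this changeset:
    # if the link count of an existing file is reported as 0, pretend it
    # is hardlinked so the copy-before-write branch is still taken.
    nlink = os.lstat(path).st_nlink
    if nlink < 1:
        nlink = 2                 # force the copy, as in issue1922
    if nlink > 1:
        # Break any hardlinks: copy to a temp file in the same directory,
        # then rename the copy over the original name.
        d, fn = os.path.split(path)
        fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d or '.')
        os.close(fd)
        shutil.copyfile(path, temp)
        shutil.copystat(path, temp)
        os.rename(temp, path)
    return open(path, 'ab')

In the opener itself the copy is performed by the existing mktempcopy()/rename() pair further down in this file; the sketch just isolates the new nlink < 1 special case.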
@@ -1,1538 +1,1540 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 return open(outname, 'rb').read()
201 return open(outname, 'rb').read()
202 finally:
202 finally:
203 try:
203 try:
204 if inname:
204 if inname:
205 os.unlink(inname)
205 os.unlink(inname)
206 except:
206 except:
207 pass
207 pass
208 try:
208 try:
209 if outname:
209 if outname:
210 os.unlink(outname)
210 os.unlink(outname)
211 except:
211 except:
212 pass
212 pass
213
213
214 filtertable = {
214 filtertable = {
215 'tempfile:': tempfilter,
215 'tempfile:': tempfilter,
216 'pipe:': pipefilter,
216 'pipe:': pipefilter,
217 }
217 }
218
218
219 def filter(s, cmd):
219 def filter(s, cmd):
220 "filter a string through a command that transforms its input to its output"
220 "filter a string through a command that transforms its input to its output"
221 for name, fn in filtertable.iteritems():
221 for name, fn in filtertable.iteritems():
222 if cmd.startswith(name):
222 if cmd.startswith(name):
223 return fn(s, cmd[len(name):].lstrip())
223 return fn(s, cmd[len(name):].lstrip())
224 return pipefilter(s, cmd)
224 return pipefilter(s, cmd)
225
225
226 def binary(s):
226 def binary(s):
227 """return true if a string is binary data"""
227 """return true if a string is binary data"""
228 return bool(s and '\0' in s)
228 return bool(s and '\0' in s)
229
229
230 def increasingchunks(source, min=1024, max=65536):
230 def increasingchunks(source, min=1024, max=65536):
231 '''return no less than min bytes per chunk while data remains,
231 '''return no less than min bytes per chunk while data remains,
232 doubling min after each chunk until it reaches max'''
232 doubling min after each chunk until it reaches max'''
233 def log2(x):
233 def log2(x):
234 if not x:
234 if not x:
235 return 0
235 return 0
236 i = 0
236 i = 0
237 while x:
237 while x:
238 x >>= 1
238 x >>= 1
239 i += 1
239 i += 1
240 return i - 1
240 return i - 1
241
241
242 buf = []
242 buf = []
243 blen = 0
243 blen = 0
244 for chunk in source:
244 for chunk in source:
245 buf.append(chunk)
245 buf.append(chunk)
246 blen += len(chunk)
246 blen += len(chunk)
247 if blen >= min:
247 if blen >= min:
248 if min < max:
248 if min < max:
249 min = min << 1
249 min = min << 1
250 nmin = 1 << log2(blen)
250 nmin = 1 << log2(blen)
251 if nmin > min:
251 if nmin > min:
252 min = nmin
252 min = nmin
253 if min > max:
253 if min > max:
254 min = max
254 min = max
255 yield ''.join(buf)
255 yield ''.join(buf)
256 blen = 0
256 blen = 0
257 buf = []
257 buf = []
258 if buf:
258 if buf:
259 yield ''.join(buf)
259 yield ''.join(buf)
260
260
261 Abort = error.Abort
261 Abort = error.Abort
262
262
263 def always(fn):
263 def always(fn):
264 return True
264 return True
265
265
266 def never(fn):
266 def never(fn):
267 return False
267 return False
268
268
269 def pathto(root, n1, n2):
269 def pathto(root, n1, n2):
270 '''return the relative path from one place to another.
270 '''return the relative path from one place to another.
271 root should use os.sep to separate directories
271 root should use os.sep to separate directories
272 n1 should use os.sep to separate directories
272 n1 should use os.sep to separate directories
273 n2 should use "/" to separate directories
273 n2 should use "/" to separate directories
274 returns an os.sep-separated path.
274 returns an os.sep-separated path.
275
275
276 If n1 is a relative path, it's assumed it's
276 If n1 is a relative path, it's assumed it's
277 relative to root.
277 relative to root.
278 n2 should always be relative to root.
278 n2 should always be relative to root.
279 '''
279 '''
280 if not n1:
280 if not n1:
281 return localpath(n2)
281 return localpath(n2)
282 if os.path.isabs(n1):
282 if os.path.isabs(n1):
283 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
283 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
284 return os.path.join(root, localpath(n2))
284 return os.path.join(root, localpath(n2))
285 n2 = '/'.join((pconvert(root), n2))
285 n2 = '/'.join((pconvert(root), n2))
286 a, b = splitpath(n1), n2.split('/')
286 a, b = splitpath(n1), n2.split('/')
287 a.reverse()
287 a.reverse()
288 b.reverse()
288 b.reverse()
289 while a and b and a[-1] == b[-1]:
289 while a and b and a[-1] == b[-1]:
290 a.pop()
290 a.pop()
291 b.pop()
291 b.pop()
292 b.reverse()
292 b.reverse()
293 return os.sep.join((['..'] * len(a)) + b) or '.'
293 return os.sep.join((['..'] * len(a)) + b) or '.'
294
294
295 def canonpath(root, cwd, myname, auditor=None):
295 def canonpath(root, cwd, myname, auditor=None):
296 """return the canonical path of myname, given cwd and root"""
296 """return the canonical path of myname, given cwd and root"""
297 if endswithsep(root):
297 if endswithsep(root):
298 rootsep = root
298 rootsep = root
299 else:
299 else:
300 rootsep = root + os.sep
300 rootsep = root + os.sep
301 name = myname
301 name = myname
302 if not os.path.isabs(name):
302 if not os.path.isabs(name):
303 name = os.path.join(root, cwd, name)
303 name = os.path.join(root, cwd, name)
304 name = os.path.normpath(name)
304 name = os.path.normpath(name)
305 if auditor is None:
305 if auditor is None:
306 auditor = path_auditor(root)
306 auditor = path_auditor(root)
307 if name != rootsep and name.startswith(rootsep):
307 if name != rootsep and name.startswith(rootsep):
308 name = name[len(rootsep):]
308 name = name[len(rootsep):]
309 auditor(name)
309 auditor(name)
310 return pconvert(name)
310 return pconvert(name)
311 elif name == root:
311 elif name == root:
312 return ''
312 return ''
313 else:
313 else:
314 # Determine whether `name' is in the hierarchy at or beneath `root',
314 # Determine whether `name' is in the hierarchy at or beneath `root',
315 # by iterating name=dirname(name) until that causes no change (can't
315 # by iterating name=dirname(name) until that causes no change (can't
316 # check name == '/', because that doesn't work on windows). For each
316 # check name == '/', because that doesn't work on windows). For each
317 # `name', compare dev/inode numbers. If they match, the list `rel'
317 # `name', compare dev/inode numbers. If they match, the list `rel'
318 # holds the reversed list of components making up the relative file
318 # holds the reversed list of components making up the relative file
319 # name we want.
319 # name we want.
320 root_st = os.stat(root)
320 root_st = os.stat(root)
321 rel = []
321 rel = []
322 while True:
322 while True:
323 try:
323 try:
324 name_st = os.stat(name)
324 name_st = os.stat(name)
325 except OSError:
325 except OSError:
326 break
326 break
327 if samestat(name_st, root_st):
327 if samestat(name_st, root_st):
328 if not rel:
328 if not rel:
329 # name was actually the same as root (maybe a symlink)
329 # name was actually the same as root (maybe a symlink)
330 return ''
330 return ''
331 rel.reverse()
331 rel.reverse()
332 name = os.path.join(*rel)
332 name = os.path.join(*rel)
333 auditor(name)
333 auditor(name)
334 return pconvert(name)
334 return pconvert(name)
335 dirname, basename = os.path.split(name)
335 dirname, basename = os.path.split(name)
336 rel.append(basename)
336 rel.append(basename)
337 if dirname == name:
337 if dirname == name:
338 break
338 break
339 name = dirname
339 name = dirname
340
340
341 raise Abort('%s not under root' % myname)
341 raise Abort('%s not under root' % myname)
342
342
343 _hgexecutable = None
343 _hgexecutable = None
344
344
345 def main_is_frozen():
345 def main_is_frozen():
346 """return True if we are a frozen executable.
346 """return True if we are a frozen executable.
347
347
348 The code supports py2exe (most common, Windows only) and tools/freeze
348 The code supports py2exe (most common, Windows only) and tools/freeze
349 (portable, not much used).
349 (portable, not much used).
350 """
350 """
351 return (hasattr(sys, "frozen") or # new py2exe
351 return (hasattr(sys, "frozen") or # new py2exe
352 hasattr(sys, "importers") or # old py2exe
352 hasattr(sys, "importers") or # old py2exe
353 imp.is_frozen("__main__")) # tools/freeze
353 imp.is_frozen("__main__")) # tools/freeze
354
354
355 def hgexecutable():
355 def hgexecutable():
356 """return location of the 'hg' executable.
356 """return location of the 'hg' executable.
357
357
358 Defaults to $HG or 'hg' in the search path.
358 Defaults to $HG or 'hg' in the search path.
359 """
359 """
360 if _hgexecutable is None:
360 if _hgexecutable is None:
361 hg = os.environ.get('HG')
361 hg = os.environ.get('HG')
362 if hg:
362 if hg:
363 set_hgexecutable(hg)
363 set_hgexecutable(hg)
364 elif main_is_frozen():
364 elif main_is_frozen():
365 set_hgexecutable(sys.executable)
365 set_hgexecutable(sys.executable)
366 else:
366 else:
367 exe = find_exe('hg') or os.path.basename(sys.argv[0])
367 exe = find_exe('hg') or os.path.basename(sys.argv[0])
368 set_hgexecutable(exe)
368 set_hgexecutable(exe)
369 return _hgexecutable
369 return _hgexecutable
370
370
371 def set_hgexecutable(path):
371 def set_hgexecutable(path):
372 """set location of the 'hg' executable"""
372 """set location of the 'hg' executable"""
373 global _hgexecutable
373 global _hgexecutable
374 _hgexecutable = path
374 _hgexecutable = path
375
375
376 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
376 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
377 '''enhanced shell command execution.
377 '''enhanced shell command execution.
378 run with environment maybe modified, maybe in different dir.
378 run with environment maybe modified, maybe in different dir.
379
379
380 if command fails and onerr is None, return status. if ui object,
380 if command fails and onerr is None, return status. if ui object,
381 print error message and return status, else raise onerr object as
381 print error message and return status, else raise onerr object as
382 exception.
382 exception.
383
383
384 if out is specified, it is assumed to be a file-like object that has a
384 if out is specified, it is assumed to be a file-like object that has a
385 write() method. stdout and stderr will be redirected to out.'''
385 write() method. stdout and stderr will be redirected to out.'''
386 def py2shell(val):
386 def py2shell(val):
387 'convert python object into string that is useful to shell'
387 'convert python object into string that is useful to shell'
388 if val is None or val is False:
388 if val is None or val is False:
389 return '0'
389 return '0'
390 if val is True:
390 if val is True:
391 return '1'
391 return '1'
392 return str(val)
392 return str(val)
393 origcmd = cmd
393 origcmd = cmd
394 cmd = quotecommand(cmd)
394 cmd = quotecommand(cmd)
395 env = dict(os.environ)
395 env = dict(os.environ)
396 env.update((k, py2shell(v)) for k, v in environ.iteritems())
396 env.update((k, py2shell(v)) for k, v in environ.iteritems())
397 env['HG'] = hgexecutable()
397 env['HG'] = hgexecutable()
398 if out is None:
398 if out is None:
399 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
399 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
400 env=env, cwd=cwd)
400 env=env, cwd=cwd)
401 else:
401 else:
402 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
402 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
403 env=env, cwd=cwd, stdout=subprocess.PIPE,
403 env=env, cwd=cwd, stdout=subprocess.PIPE,
404 stderr=subprocess.STDOUT)
404 stderr=subprocess.STDOUT)
405 for line in proc.stdout:
405 for line in proc.stdout:
406 out.write(line)
406 out.write(line)
407 proc.wait()
407 proc.wait()
408 rc = proc.returncode
408 rc = proc.returncode
409 if sys.platform == 'OpenVMS' and rc & 1:
409 if sys.platform == 'OpenVMS' and rc & 1:
410 rc = 0
410 rc = 0
411 if rc and onerr:
411 if rc and onerr:
412 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
412 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
413 explain_exit(rc)[0])
413 explain_exit(rc)[0])
414 if errprefix:
414 if errprefix:
415 errmsg = '%s: %s' % (errprefix, errmsg)
415 errmsg = '%s: %s' % (errprefix, errmsg)
416 try:
416 try:
417 onerr.warn(errmsg + '\n')
417 onerr.warn(errmsg + '\n')
418 except AttributeError:
418 except AttributeError:
419 raise onerr(errmsg)
419 raise onerr(errmsg)
420 return rc
420 return rc
421
421
422 def checksignature(func):
422 def checksignature(func):
423 '''wrap a function with code to check for calling errors'''
423 '''wrap a function with code to check for calling errors'''
424 def check(*args, **kwargs):
424 def check(*args, **kwargs):
425 try:
425 try:
426 return func(*args, **kwargs)
426 return func(*args, **kwargs)
427 except TypeError:
427 except TypeError:
428 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
428 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
429 raise error.SignatureError
429 raise error.SignatureError
430 raise
430 raise
431
431
432 return check
432 return check
433
433
434 def unlink(f):
434 def unlink(f):
435 """unlink and remove the directory if it is empty"""
435 """unlink and remove the directory if it is empty"""
436 os.unlink(f)
436 os.unlink(f)
437 # try removing directories that might now be empty
437 # try removing directories that might now be empty
438 try:
438 try:
439 os.removedirs(os.path.dirname(f))
439 os.removedirs(os.path.dirname(f))
440 except OSError:
440 except OSError:
441 pass
441 pass
442
442
443 def copyfile(src, dest):
443 def copyfile(src, dest):
444 "copy a file, preserving mode and atime/mtime"
444 "copy a file, preserving mode and atime/mtime"
445 if os.path.islink(src):
445 if os.path.islink(src):
446 try:
446 try:
447 os.unlink(dest)
447 os.unlink(dest)
448 except:
448 except:
449 pass
449 pass
450 os.symlink(os.readlink(src), dest)
450 os.symlink(os.readlink(src), dest)
451 else:
451 else:
452 try:
452 try:
453 shutil.copyfile(src, dest)
453 shutil.copyfile(src, dest)
454 shutil.copystat(src, dest)
454 shutil.copystat(src, dest)
455 except shutil.Error, inst:
455 except shutil.Error, inst:
456 raise Abort(str(inst))
456 raise Abort(str(inst))
457
457
458 def copyfiles(src, dst, hardlink=None):
458 def copyfiles(src, dst, hardlink=None):
459 """Copy a directory tree using hardlinks if possible"""
459 """Copy a directory tree using hardlinks if possible"""
460
460
461 if hardlink is None:
461 if hardlink is None:
462 hardlink = (os.stat(src).st_dev ==
462 hardlink = (os.stat(src).st_dev ==
463 os.stat(os.path.dirname(dst)).st_dev)
463 os.stat(os.path.dirname(dst)).st_dev)
464
464
465 num = 0
465 num = 0
466 if os.path.isdir(src):
466 if os.path.isdir(src):
467 os.mkdir(dst)
467 os.mkdir(dst)
468 for name, kind in osutil.listdir(src):
468 for name, kind in osutil.listdir(src):
469 srcname = os.path.join(src, name)
469 srcname = os.path.join(src, name)
470 dstname = os.path.join(dst, name)
470 dstname = os.path.join(dst, name)
471 hardlink, n = copyfiles(srcname, dstname, hardlink)
471 hardlink, n = copyfiles(srcname, dstname, hardlink)
472 num += n
472 num += n
473 else:
473 else:
474 if hardlink:
474 if hardlink:
475 try:
475 try:
476 os_link(src, dst)
476 os_link(src, dst)
477 except (IOError, OSError):
477 except (IOError, OSError):
478 hardlink = False
478 hardlink = False
479 shutil.copy(src, dst)
479 shutil.copy(src, dst)
480 else:
480 else:
481 shutil.copy(src, dst)
481 shutil.copy(src, dst)
482 num += 1
482 num += 1
483
483
484 return hardlink, num
484 return hardlink, num
485
485
486 class path_auditor(object):
486 class path_auditor(object):
487 '''ensure that a filesystem path contains no banned components.
487 '''ensure that a filesystem path contains no banned components.
488 the following properties of a path are checked:
488 the following properties of a path are checked:
489
489
490 - under top-level .hg
490 - under top-level .hg
491 - starts at the root of a windows drive
491 - starts at the root of a windows drive
492 - contains ".."
492 - contains ".."
493 - traverses a symlink (e.g. a/symlink_here/b)
493 - traverses a symlink (e.g. a/symlink_here/b)
494 - inside a nested repository (a callback can be used to approve
494 - inside a nested repository (a callback can be used to approve
495 some nested repositories, e.g., subrepositories)
495 some nested repositories, e.g., subrepositories)
496 '''
496 '''
497
497
498 def __init__(self, root, callback=None):
498 def __init__(self, root, callback=None):
499 self.audited = set()
499 self.audited = set()
500 self.auditeddir = set()
500 self.auditeddir = set()
501 self.root = root
501 self.root = root
502 self.callback = callback
502 self.callback = callback
503
503
504 def __call__(self, path):
504 def __call__(self, path):
505 if path in self.audited:
505 if path in self.audited:
506 return
506 return
507 normpath = os.path.normcase(path)
507 normpath = os.path.normcase(path)
508 parts = splitpath(normpath)
508 parts = splitpath(normpath)
509 if (os.path.splitdrive(path)[0]
509 if (os.path.splitdrive(path)[0]
510 or parts[0].lower() in ('.hg', '.hg.', '')
510 or parts[0].lower() in ('.hg', '.hg.', '')
511 or os.pardir in parts):
511 or os.pardir in parts):
512 raise Abort(_("path contains illegal component: %s") % path)
512 raise Abort(_("path contains illegal component: %s") % path)
513 if '.hg' in path.lower():
513 if '.hg' in path.lower():
514 lparts = [p.lower() for p in parts]
514 lparts = [p.lower() for p in parts]
515 for p in '.hg', '.hg.':
515 for p in '.hg', '.hg.':
516 if p in lparts[1:]:
516 if p in lparts[1:]:
517 pos = lparts.index(p)
517 pos = lparts.index(p)
518 base = os.path.join(*parts[:pos])
518 base = os.path.join(*parts[:pos])
519 raise Abort(_('path %r is inside repo %r') % (path, base))
519 raise Abort(_('path %r is inside repo %r') % (path, base))
520 def check(prefix):
520 def check(prefix):
521 curpath = os.path.join(self.root, prefix)
521 curpath = os.path.join(self.root, prefix)
522 try:
522 try:
523 st = os.lstat(curpath)
523 st = os.lstat(curpath)
524 except OSError, err:
524 except OSError, err:
525 # EINVAL can be raised as invalid path syntax under win32.
525 # EINVAL can be raised as invalid path syntax under win32.
526 # They must be ignored so that patterns can still be checked.
526 # They must be ignored so that patterns can still be checked.
527 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
527 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
528 raise
528 raise
529 else:
529 else:
530 if stat.S_ISLNK(st.st_mode):
530 if stat.S_ISLNK(st.st_mode):
531 raise Abort(_('path %r traverses symbolic link %r') %
531 raise Abort(_('path %r traverses symbolic link %r') %
532 (path, prefix))
532 (path, prefix))
533 elif (stat.S_ISDIR(st.st_mode) and
533 elif (stat.S_ISDIR(st.st_mode) and
534 os.path.isdir(os.path.join(curpath, '.hg'))):
534 os.path.isdir(os.path.join(curpath, '.hg'))):
535 if not self.callback or not self.callback(curpath):
535 if not self.callback or not self.callback(curpath):
536 raise Abort(_('path %r is inside repo %r') %
536 raise Abort(_('path %r is inside repo %r') %
537 (path, prefix))
537 (path, prefix))
538 parts.pop()
538 parts.pop()
539 prefixes = []
539 prefixes = []
540 while parts:
540 while parts:
541 prefix = os.sep.join(parts)
541 prefix = os.sep.join(parts)
542 if prefix in self.auditeddir:
542 if prefix in self.auditeddir:
543 break
543 break
544 check(prefix)
544 check(prefix)
545 prefixes.append(prefix)
545 prefixes.append(prefix)
546 parts.pop()
546 parts.pop()
547
547
548 self.audited.add(path)
548 self.audited.add(path)
549 # only add prefixes to the cache after checking everything: we don't
549 # only add prefixes to the cache after checking everything: we don't
550 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
550 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
551 self.auditeddir.update(prefixes)
551 self.auditeddir.update(prefixes)
552
552
553 def nlinks(pathname):
553 def nlinks(pathname):
554 """Return number of hardlinks for the given file."""
554 """Return number of hardlinks for the given file."""
555 return os.lstat(pathname).st_nlink
555 return os.lstat(pathname).st_nlink
556
556
557 if hasattr(os, 'link'):
557 if hasattr(os, 'link'):
558 os_link = os.link
558 os_link = os.link
559 else:
559 else:
560 def os_link(src, dst):
560 def os_link(src, dst):
561 raise OSError(0, _("Hardlinks not supported"))
561 raise OSError(0, _("Hardlinks not supported"))
562
562
563 def lookup_reg(key, name=None, scope=None):
563 def lookup_reg(key, name=None, scope=None):
564 return None
564 return None
565
565
566 def hidewindow():
566 def hidewindow():
567 """Hide current shell window.
567 """Hide current shell window.
568
568
569 Used to hide the window opened when starting asynchronous
569 Used to hide the window opened when starting asynchronous
570 child process under Windows, unneeded on other systems.
570 child process under Windows, unneeded on other systems.
571 """
571 """
572 pass
572 pass
573
573
574 if os.name == 'nt':
574 if os.name == 'nt':
575 from windows import *
575 from windows import *
576 else:
576 else:
577 from posix import *
577 from posix import *
578
578
579 def makelock(info, pathname):
579 def makelock(info, pathname):
580 try:
580 try:
581 return os.symlink(info, pathname)
581 return os.symlink(info, pathname)
582 except OSError, why:
582 except OSError, why:
583 if why.errno == errno.EEXIST:
583 if why.errno == errno.EEXIST:
584 raise
584 raise
585 except AttributeError: # no symlink in os
585 except AttributeError: # no symlink in os
586 pass
586 pass
587
587
588 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
588 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
589 os.write(ld, info)
589 os.write(ld, info)
590 os.close(ld)
590 os.close(ld)
591
591
592 def readlock(pathname):
592 def readlock(pathname):
593 try:
593 try:
594 return os.readlink(pathname)
594 return os.readlink(pathname)
595 except OSError, why:
595 except OSError, why:
596 if why.errno not in (errno.EINVAL, errno.ENOSYS):
596 if why.errno not in (errno.EINVAL, errno.ENOSYS):
597 raise
597 raise
598 except AttributeError: # no symlink in os
598 except AttributeError: # no symlink in os
599 pass
599 pass
600 return posixfile(pathname).read()
600 return posixfile(pathname).read()
601
601
602 def fstat(fp):
602 def fstat(fp):
603 '''stat file object that may not have fileno method.'''
603 '''stat file object that may not have fileno method.'''
604 try:
604 try:
605 return os.fstat(fp.fileno())
605 return os.fstat(fp.fileno())
606 except AttributeError:
606 except AttributeError:
607 return os.stat(fp.name)
607 return os.stat(fp.name)
608
608
609 # File system features
609 # File system features
610
610
611 def checkcase(path):
611 def checkcase(path):
612 """
612 """
613 Check whether the given path is on a case-sensitive filesystem
613 Check whether the given path is on a case-sensitive filesystem
614
614
615 Requires a path (like /foo/.hg) ending with a foldable final
615 Requires a path (like /foo/.hg) ending with a foldable final
616 directory component.
616 directory component.
617 """
617 """
618 s1 = os.stat(path)
618 s1 = os.stat(path)
619 d, b = os.path.split(path)
619 d, b = os.path.split(path)
620 p2 = os.path.join(d, b.upper())
620 p2 = os.path.join(d, b.upper())
621 if path == p2:
621 if path == p2:
622 p2 = os.path.join(d, b.lower())
622 p2 = os.path.join(d, b.lower())
623 try:
623 try:
624 s2 = os.stat(p2)
624 s2 = os.stat(p2)
625 if s2 == s1:
625 if s2 == s1:
626 return False
626 return False
627 return True
627 return True
628 except:
628 except:
629 return True
629 return True
630
630
631 _fspathcache = {}
631 _fspathcache = {}
632 def fspath(name, root):
632 def fspath(name, root):
633 '''Get name in the case stored in the filesystem
633 '''Get name in the case stored in the filesystem
634
634
635 The name is either relative to root, or it is an absolute path starting
635 The name is either relative to root, or it is an absolute path starting
636 with root. Note that this function is unnecessary, and should not be
636 with root. Note that this function is unnecessary, and should not be
637 called, for case-sensitive filesystems (simply because it's expensive).
637 called, for case-sensitive filesystems (simply because it's expensive).
638 '''
638 '''
639 # If name is absolute, make it relative
639 # If name is absolute, make it relative
640 if name.lower().startswith(root.lower()):
640 if name.lower().startswith(root.lower()):
641 l = len(root)
641 l = len(root)
642 if name[l] == os.sep or name[l] == os.altsep:
642 if name[l] == os.sep or name[l] == os.altsep:
643 l = l + 1
643 l = l + 1
644 name = name[l:]
644 name = name[l:]
645
645
646 if not os.path.lexists(os.path.join(root, name)):
646 if not os.path.lexists(os.path.join(root, name)):
647 return None
647 return None
648
648
649 seps = os.sep
649 seps = os.sep
650 if os.altsep:
650 if os.altsep:
651 seps = seps + os.altsep
651 seps = seps + os.altsep
652 # Protect backslashes. This gets silly very quickly.
652 # Protect backslashes. This gets silly very quickly.
653 seps.replace('\\','\\\\')
653 seps.replace('\\','\\\\')
654 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
654 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
655 dir = os.path.normcase(os.path.normpath(root))
655 dir = os.path.normcase(os.path.normpath(root))
656 result = []
656 result = []
657 for part, sep in pattern.findall(name):
657 for part, sep in pattern.findall(name):
658 if sep:
658 if sep:
659 result.append(sep)
659 result.append(sep)
660 continue
660 continue
661
661
662 if dir not in _fspathcache:
662 if dir not in _fspathcache:
663 _fspathcache[dir] = os.listdir(dir)
663 _fspathcache[dir] = os.listdir(dir)
664 contents = _fspathcache[dir]
664 contents = _fspathcache[dir]
665
665
666 lpart = part.lower()
666 lpart = part.lower()
667 lenp = len(part)
667 lenp = len(part)
668 for n in contents:
668 for n in contents:
669 if lenp == len(n) and n.lower() == lpart:
669 if lenp == len(n) and n.lower() == lpart:
670 result.append(n)
670 result.append(n)
671 break
671 break
672 else:
672 else:
673 # Cannot happen, as the file exists!
673 # Cannot happen, as the file exists!
674 result.append(part)
674 result.append(part)
675 dir = os.path.join(dir, lpart)
675 dir = os.path.join(dir, lpart)
676
676
677 return ''.join(result)
677 return ''.join(result)
678
678
679 def checkexec(path):
679 def checkexec(path):
680 """
680 """
681 Check whether the given path is on a filesystem with UNIX-like exec flags
681 Check whether the given path is on a filesystem with UNIX-like exec flags
682
682
683 Requires a directory (like /foo/.hg)
683 Requires a directory (like /foo/.hg)
684 """
684 """
685
685
686 # VFAT on some Linux versions can flip mode but it doesn't persist
686 # VFAT on some Linux versions can flip mode but it doesn't persist
687 # a FS remount. Frequently we can detect it if files are created
687 # a FS remount. Frequently we can detect it if files are created
688 # with exec bit on.
688 # with exec bit on.
689
689
690 try:
690 try:
691 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
691 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
692 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
692 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
693 try:
693 try:
694 os.close(fh)
694 os.close(fh)
695 m = os.stat(fn).st_mode & 0777
695 m = os.stat(fn).st_mode & 0777
696 new_file_has_exec = m & EXECFLAGS
696 new_file_has_exec = m & EXECFLAGS
697 os.chmod(fn, m ^ EXECFLAGS)
697 os.chmod(fn, m ^ EXECFLAGS)
698 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
698 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
699 finally:
699 finally:
700 os.unlink(fn)
700 os.unlink(fn)
701 except (IOError, OSError):
701 except (IOError, OSError):
702 # we don't care, the user probably won't be able to commit anyway
702 # we don't care, the user probably won't be able to commit anyway
703 return False
703 return False
704 return not (new_file_has_exec or exec_flags_cannot_flip)
704 return not (new_file_has_exec or exec_flags_cannot_flip)
705
705
706 def checklink(path):
706 def checklink(path):
707 """check whether the given path is on a symlink-capable filesystem"""
707 """check whether the given path is on a symlink-capable filesystem"""
708 # mktemp is not racy because symlink creation will fail if the
708 # mktemp is not racy because symlink creation will fail if the
709 # file already exists
709 # file already exists
710 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
710 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
711 try:
711 try:
712 os.symlink(".", name)
712 os.symlink(".", name)
713 os.unlink(name)
713 os.unlink(name)
714 return True
714 return True
715 except (OSError, AttributeError):
715 except (OSError, AttributeError):
716 return False
716 return False
717
717
718 def checknlink(testfile):
718 def checknlink(testfile):
719 '''check whether hardlink count reporting works properly'''
719 '''check whether hardlink count reporting works properly'''
720
720
721 # testfile may be open, so we need a separate file for checking to
721 # testfile may be open, so we need a separate file for checking to
722 # work around issue2543 (or testfile may get lost on Samba shares)
722 # work around issue2543 (or testfile may get lost on Samba shares)
723 f1 = testfile + ".hgtmp1"
723 f1 = testfile + ".hgtmp1"
724 if os.path.lexists(f1):
724 if os.path.lexists(f1):
725 return False
725 return False
726 try:
726 try:
727 posixfile(f1, 'w').close()
727 posixfile(f1, 'w').close()
728 except IOError:
728 except IOError:
729 return False
729 return False
730
730
731 f2 = testfile + ".hgtmp2"
731 f2 = testfile + ".hgtmp2"
732 fd = None
732 fd = None
733 try:
733 try:
734 try:
734 try:
735 os_link(f1, f2)
735 os_link(f1, f2)
736 except OSError:
736 except OSError:
737 return False
737 return False
738
738
739 # nlinks() may behave differently for files on Windows shares if
739 # nlinks() may behave differently for files on Windows shares if
740 # the file is open.
740 # the file is open.
741 fd = open(f2)
741 fd = open(f2)
742 return nlinks(f2) > 1
742 return nlinks(f2) > 1
743 finally:
743 finally:
744 if fd is not None:
744 if fd is not None:
745 fd.close()
745 fd.close()
746 for f in (f1, f2):
746 for f in (f1, f2):
747 try:
747 try:
748 os.unlink(f)
748 os.unlink(f)
749 except OSError:
749 except OSError:
750 pass
750 pass
751
751
752 return False
752 return False
753
753
754 def endswithsep(path):
754 def endswithsep(path):
755 '''Check path ends with os.sep or os.altsep.'''
755 '''Check path ends with os.sep or os.altsep.'''
756 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
756 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
757
757
758 def splitpath(path):
758 def splitpath(path):
759 '''Split path by os.sep.
759 '''Split path by os.sep.
760 Note that this function does not use os.altsep because this is
760 Note that this function does not use os.altsep because this is
761 an alternative of simple "xxx.split(os.sep)".
761 an alternative of simple "xxx.split(os.sep)".
762 It is recommended to use os.path.normpath() before using this
762 It is recommended to use os.path.normpath() before using this
763 function if need.'''
763 function if need.'''
764 return path.split(os.sep)
764 return path.split(os.sep)
765
765
766 def gui():
766 def gui():
767 '''Are we running in a GUI?'''
767 '''Are we running in a GUI?'''
768 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
768 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
769
769
770 def mktempcopy(name, emptyok=False, createmode=None):
770 def mktempcopy(name, emptyok=False, createmode=None):
771 """Create a temporary file with the same contents from name
771 """Create a temporary file with the same contents from name
772
772
773 The permission bits are copied from the original file.
773 The permission bits are copied from the original file.
774
774
775 If the temporary file is going to be truncated immediately, you
775 If the temporary file is going to be truncated immediately, you
776 can use emptyok=True as an optimization.
776 can use emptyok=True as an optimization.
777
777
778 Returns the name of the temporary file.
778 Returns the name of the temporary file.
779 """
779 """
780 d, fn = os.path.split(name)
780 d, fn = os.path.split(name)
781 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
781 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
782 os.close(fd)
782 os.close(fd)
783 # Temporary files are created with mode 0600, which is usually not
783 # Temporary files are created with mode 0600, which is usually not
784 # what we want. If the original file already exists, just copy
784 # what we want. If the original file already exists, just copy
785 # its mode. Otherwise, manually obey umask.
785 # its mode. Otherwise, manually obey umask.
786 try:
786 try:
787 st_mode = os.lstat(name).st_mode & 0777
787 st_mode = os.lstat(name).st_mode & 0777
788 except OSError, inst:
788 except OSError, inst:
789 if inst.errno != errno.ENOENT:
789 if inst.errno != errno.ENOENT:
790 raise
790 raise
791 st_mode = createmode
791 st_mode = createmode
792 if st_mode is None:
792 if st_mode is None:
793 st_mode = ~umask
793 st_mode = ~umask
794 st_mode &= 0666
794 st_mode &= 0666
795 os.chmod(temp, st_mode)
795 os.chmod(temp, st_mode)
796 if emptyok:
796 if emptyok:
797 return temp
797 return temp
798 try:
798 try:
799 try:
799 try:
800 ifp = posixfile(name, "rb")
800 ifp = posixfile(name, "rb")
801 except IOError, inst:
801 except IOError, inst:
802 if inst.errno == errno.ENOENT:
802 if inst.errno == errno.ENOENT:
803 return temp
803 return temp
804 if not getattr(inst, 'filename', None):
804 if not getattr(inst, 'filename', None):
805 inst.filename = name
805 inst.filename = name
806 raise
806 raise
807 ofp = posixfile(temp, "wb")
807 ofp = posixfile(temp, "wb")
808 for chunk in filechunkiter(ifp):
808 for chunk in filechunkiter(ifp):
809 ofp.write(chunk)
809 ofp.write(chunk)
810 ifp.close()
810 ifp.close()
811 ofp.close()
811 ofp.close()
812 except:
812 except:
813 try: os.unlink(temp)
813 try: os.unlink(temp)
814 except: pass
814 except: pass
815 raise
815 raise
816 return temp
816 return temp
817
817
818 class atomictempfile(object):
818 class atomictempfile(object):
819 """file-like object that atomically updates a file
819 """file-like object that atomically updates a file
820
820
821 All writes will be redirected to a temporary copy of the original
821 All writes will be redirected to a temporary copy of the original
822 file. When rename is called, the copy is renamed to the original
822 file. When rename is called, the copy is renamed to the original
823 name, making the changes visible.
823 name, making the changes visible.
824 """
824 """
825 def __init__(self, name, mode='w+b', createmode=None):
825 def __init__(self, name, mode='w+b', createmode=None):
826 self.__name = name
826 self.__name = name
827 self._fp = None
827 self._fp = None
828 self.temp = mktempcopy(name, emptyok=('w' in mode),
828 self.temp = mktempcopy(name, emptyok=('w' in mode),
829 createmode=createmode)
829 createmode=createmode)
830 self._fp = posixfile(self.temp, mode)
830 self._fp = posixfile(self.temp, mode)
831
831
832 def __getattr__(self, name):
832 def __getattr__(self, name):
833 return getattr(self._fp, name)
833 return getattr(self._fp, name)
834
834
835 def rename(self):
835 def rename(self):
836 if not self._fp.closed:
836 if not self._fp.closed:
837 self._fp.close()
837 self._fp.close()
838 rename(self.temp, localpath(self.__name))
838 rename(self.temp, localpath(self.__name))
839
839
840 def __del__(self):
840 def __del__(self):
841 if not self._fp:
841 if not self._fp:
842 return
842 return
843 if not self._fp.closed:
843 if not self._fp.closed:
844 try:
844 try:
845 os.unlink(self.temp)
845 os.unlink(self.temp)
846 except: pass
846 except: pass
847 self._fp.close()
847 self._fp.close()
848
848
849 def makedirs(name, mode=None):
849 def makedirs(name, mode=None):
850 """recursive directory creation with parent mode inheritance"""
850 """recursive directory creation with parent mode inheritance"""
851 parent = os.path.abspath(os.path.dirname(name))
851 parent = os.path.abspath(os.path.dirname(name))
852 try:
852 try:
853 os.mkdir(name)
853 os.mkdir(name)
854 if mode is not None:
854 if mode is not None:
855 os.chmod(name, mode)
855 os.chmod(name, mode)
856 return
856 return
857 except OSError, err:
857 except OSError, err:
858 if err.errno == errno.EEXIST:
858 if err.errno == errno.EEXIST:
859 return
859 return
860 if not name or parent == name or err.errno != errno.ENOENT:
860 if not name or parent == name or err.errno != errno.ENOENT:
861 raise
861 raise
862 makedirs(parent, mode)
862 makedirs(parent, mode)
863 makedirs(name, mode)
863 makedirs(name, mode)
864
864
865 class opener(object):
865 class opener(object):
866 """Open files relative to a base directory
866 """Open files relative to a base directory
867
867
868 This class is used to hide the details of COW semantics and
868 This class is used to hide the details of COW semantics and
869 remote file access from higher level code.
869 remote file access from higher level code.
870 """
870 """
871 def __init__(self, base, audit=True):
871 def __init__(self, base, audit=True):
872 self.base = base
872 self.base = base
873 if audit:
873 if audit:
874 self.auditor = path_auditor(base)
874 self.auditor = path_auditor(base)
875 else:
875 else:
876 self.auditor = always
876 self.auditor = always
877 self.createmode = None
877 self.createmode = None
878 self._trustnlink = None
878 self._trustnlink = None
879
879
880 @propertycache
880 @propertycache
881 def _can_symlink(self):
881 def _can_symlink(self):
882 return checklink(self.base)
882 return checklink(self.base)
883
883
884 def _fixfilemode(self, name):
884 def _fixfilemode(self, name):
885 if self.createmode is None:
885 if self.createmode is None:
886 return
886 return
887 os.chmod(name, self.createmode & 0666)
887 os.chmod(name, self.createmode & 0666)
888
888
889 def __call__(self, path, mode="r", text=False, atomictemp=False):
889 def __call__(self, path, mode="r", text=False, atomictemp=False):
890 self.auditor(path)
890 self.auditor(path)
891 f = os.path.join(self.base, path)
891 f = os.path.join(self.base, path)
892
892
893 if not text and "b" not in mode:
893 if not text and "b" not in mode:
894 mode += "b" # for that other OS
894 mode += "b" # for that other OS
895
895
896 nlink = -1
896 nlink = -1
897 st_mode = None
897 st_mode = None
898 dirname, basename = os.path.split(f)
898 dirname, basename = os.path.split(f)
899 # If basename is empty, then the path is malformed because it points
899 # If basename is empty, then the path is malformed because it points
900 # to a directory. Let the posixfile() call below raise IOError.
900 # to a directory. Let the posixfile() call below raise IOError.
901 if basename and mode not in ('r', 'rb'):
901 if basename and mode not in ('r', 'rb'):
902 if atomictemp:
902 if atomictemp:
903 if not os.path.isdir(dirname):
903 if not os.path.isdir(dirname):
904 makedirs(dirname, self.createmode)
904 makedirs(dirname, self.createmode)
905 return atomictempfile(f, mode, self.createmode)
905 return atomictempfile(f, mode, self.createmode)
906 try:
906 try:
907 if 'w' in mode:
907 if 'w' in mode:
908 st_mode = os.lstat(f).st_mode & 0777
908 st_mode = os.lstat(f).st_mode & 0777
909 os.unlink(f)
909 os.unlink(f)
910 nlink = 0
910 nlink = 0
911 else:
911 else:
912 # nlinks() may behave differently for files on Windows
912 # nlinks() may behave differently for files on Windows
913 # shares if the file is open.
913 # shares if the file is open.
914 fd = open(f)
914 fd = open(f)
915 nlink = nlinks(f)
915 nlink = nlinks(f)
916 if nlink < 1:
917 nlink = 2 # force mktempcopy (issue1922)
916 fd.close()
918 fd.close()
917 except (OSError, IOError):
919 except (OSError, IOError):
918 nlink = 0
920 nlink = 0
919 if not os.path.isdir(dirname):
921 if not os.path.isdir(dirname):
920 makedirs(dirname, self.createmode)
922 makedirs(dirname, self.createmode)
921 if nlink > 0:
923 if nlink > 0:
922 if self._trustnlink is None:
924 if self._trustnlink is None:
923 self._trustnlink = nlink > 1 or checknlink(f)
925 self._trustnlink = nlink > 1 or checknlink(f)
924 if nlink > 1 or not self._trustnlink:
926 if nlink > 1 or not self._trustnlink:
925 rename(mktempcopy(f), f)
927 rename(mktempcopy(f), f)
926 fp = posixfile(f, mode)
928 fp = posixfile(f, mode)
927 if nlink == 0:
929 if nlink == 0:
928 if st_mode is None:
930 if st_mode is None:
929 self._fixfilemode(f)
931 self._fixfilemode(f)
930 else:
932 else:
931 os.chmod(f, st_mode)
933 os.chmod(f, st_mode)
932 return fp
934 return fp
933
935
934 def symlink(self, src, dst):
936 def symlink(self, src, dst):
935 self.auditor(dst)
937 self.auditor(dst)
936 linkname = os.path.join(self.base, dst)
938 linkname = os.path.join(self.base, dst)
937 try:
939 try:
938 os.unlink(linkname)
940 os.unlink(linkname)
939 except OSError:
941 except OSError:
940 pass
942 pass
941
943
942 dirname = os.path.dirname(linkname)
944 dirname = os.path.dirname(linkname)
943 if not os.path.exists(dirname):
945 if not os.path.exists(dirname):
944 makedirs(dirname, self.createmode)
946 makedirs(dirname, self.createmode)
945
947
946 if self._can_symlink:
948 if self._can_symlink:
947 try:
949 try:
948 os.symlink(src, linkname)
950 os.symlink(src, linkname)
949 except OSError, err:
951 except OSError, err:
950 raise OSError(err.errno, _('could not symlink to %r: %s') %
952 raise OSError(err.errno, _('could not symlink to %r: %s') %
951 (src, err.strerror), linkname)
953 (src, err.strerror), linkname)
952 else:
954 else:
953 f = self(dst, "w")
955 f = self(dst, "w")
954 f.write(src)
956 f.write(src)
955 f.close()
957 f.close()
956 self._fixfilemode(dst)
958 self._fixfilemode(dst)
957
959
958 class chunkbuffer(object):
960 class chunkbuffer(object):
959 """Allow arbitrary sized chunks of data to be efficiently read from an
961 """Allow arbitrary sized chunks of data to be efficiently read from an
960 iterator over chunks of arbitrary size."""
962 iterator over chunks of arbitrary size."""
961
963
962 def __init__(self, in_iter):
964 def __init__(self, in_iter):
963 """in_iter is the iterator that's iterating over the input chunks.
965 """in_iter is the iterator that's iterating over the input chunks.
964 targetsize is how big a buffer to try to maintain."""
966 targetsize is how big a buffer to try to maintain."""
965 def splitbig(chunks):
967 def splitbig(chunks):
966 for chunk in chunks:
968 for chunk in chunks:
967 if len(chunk) > 2**20:
969 if len(chunk) > 2**20:
968 pos = 0
970 pos = 0
969 while pos < len(chunk):
971 while pos < len(chunk):
970 end = pos + 2 ** 18
972 end = pos + 2 ** 18
971 yield chunk[pos:end]
973 yield chunk[pos:end]
972 pos = end
974 pos = end
973 else:
975 else:
974 yield chunk
976 yield chunk
975 self.iter = splitbig(in_iter)
977 self.iter = splitbig(in_iter)
976 self._queue = []
978 self._queue = []
977
979
978 def read(self, l):
980 def read(self, l):
979 """Read L bytes of data from the iterator of chunks of data.
981 """Read L bytes of data from the iterator of chunks of data.
980 Returns less than L bytes if the iterator runs dry."""
982 Returns less than L bytes if the iterator runs dry."""
981 left = l
983 left = l
982 buf = ''
984 buf = ''
983 queue = self._queue
985 queue = self._queue
984 while left > 0:
986 while left > 0:
985 # refill the queue
987 # refill the queue
986 if not queue:
988 if not queue:
987 target = 2**18
989 target = 2**18
988 for chunk in self.iter:
990 for chunk in self.iter:
989 queue.append(chunk)
991 queue.append(chunk)
990 target -= len(chunk)
992 target -= len(chunk)
991 if target <= 0:
993 if target <= 0:
992 break
994 break
993 if not queue:
995 if not queue:
994 break
996 break
995
997
996 chunk = queue.pop(0)
998 chunk = queue.pop(0)
997 left -= len(chunk)
999 left -= len(chunk)
998 if left < 0:
1000 if left < 0:
999 queue.insert(0, chunk[left:])
1001 queue.insert(0, chunk[left:])
1000 buf += chunk[:left]
1002 buf += chunk[:left]
1001 else:
1003 else:
1002 buf += chunk
1004 buf += chunk
1003
1005
1004 return buf
1006 return buf
1005
1007
def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file, size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None:
            nbytes = size
        else:
            nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s:
            break
        if limit:
            limit -= len(s)
        yield s

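# Illustrative sketch (not part of the original module): streaming a file in
# bounded chunks. The path, chunk size and limit are made up for the example.
#
#   >>> fp = open('/tmp/some-big-file', 'rb')
#   >>> total = 0
#   >>> for chunk in filechunkiter(fp, size=8192, limit=1048576):
#   ...     total += len(chunk)       # never reads more than 'limit' bytes
#   >>> fp.close()
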
def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    t = time.mktime(lt)
    if t < 0:
        hint = _("check your clock")
        raise Abort(_("negative timestamp: %d") % t, hint=hint)
    return t, tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. "%1" in the format string expands
    to the offset's sign and hours, "%2" to its minutes."""
    t, tz = date or makedate()
    if t < 0:
        t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
        tz = 0
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) // 60
        format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

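# Illustrative sketch (not part of the original module): formatting the epoch,
# given as an explicit (unixtime, offset) tuple. The weekday and month names
# assume the C locale.
#
#   >>> datestr((0, 0))
#   'Thu Jan 01 00:00:00 1970 +0000'
#   >>> shortdate((0, 0))
#   '1970-01-01'
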
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    usenow = False # default to using biased defaults
    for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part][usenow]
            format += "@%" + part[0]
        else:
            # We've found a specific time element, less specific time
            # elements are relative to today
            usenow = True

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, bias={}):
    """parse a localized date/time and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        now = makedate()
        defaults = {}
        nowmap = {}
        for part in "d mb yY HI M S".split():
            # this piece is for rounding the specific end of unknowns
            b = bias.get(part)
            if b is None:
                if part[0] in "HMS":
                    b = "00"
                else:
                    b = "0"

            # this piece is for matching the generic end to today's date
            n = datestr(now, "%" + part[0])

            defaults[part] = (b, n)

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if when < 0:
        raise Abort(_('negative date value: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

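# Illustrative sketch (not part of the original module): the two accepted
# input shapes.
#
#   >>> parsedate('1234567890 0')                  # raw "unixtime offset"
#   (1234567890, 0)
#   >>> parsedate('2006-02-01 13:00:30 +0200')     # one of defaultdateformats
#   (1138791630, -7200)
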
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    >>> p1 = parsedate("10:29:59")
    >>> p2 = parsedate("10:30:00")
    >>> p3 = parsedate("10:30:59")
    >>> p4 = parsedate("10:31:00")
    >>> p5 = parsedate("Sep 15 10:30:00 1999")
    >>> f = matchdate("10:30")
    >>> f(p1[0])
    False
    >>> f(p2[0])
    True
    >>> f(p3[0])
    True
    >>> f(p4[0])
    False
    >>> f(p5[0])
    False
    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except Abort:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

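# Illustrative sketch (not part of the original module); the address is made up.
#
#   >>> shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> email('John Doe <john.doe@example.com>')
#   'john.doe@example.com'
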
def _ellipsis(text, maxlength):
    if len(text) <= maxlength:
        return text, False
    else:
        return "%s..." % (text[:maxlength - 3]), True

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # use unicode not to split at intermediate multi-byte sequence
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if not truncated:
            return text
        return utext.encode(encoding.encoding)
    except (UnicodeDecodeError, UnicodeEncodeError):
        return _ellipsis(text, maxlength)[0]

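# Illustrative sketch (not part of the original module): the "..." counts
# towards maxlength, so the result is never longer than maxlength characters.
#
#   >>> ellipsis('mercurial', 8)
#   'mercu...'
#   >>> ellipsis('mercurial', 9)
#   'mercurial'
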
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

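# Illustrative sketch (not part of the original module): listing repositories
# under a hypothetical directory, following symlinks.
#
#   >>> for repo in walkrepos('/srv/hg', followsym=True):
#   ...     print repo
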
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

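# Illustrative sketch (not part of the original module): with the hypothetical
# setting HGRCPATH=/etc/mercurial/hgrc.d:/home/user/.hgrc, rcpath() would
# return every *.rc file found directly under /etc/mercurial/hgrc.d, followed
# by /home/user/.hgrc itself; with HGRCPATH unset it falls back to os_rcpath().
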
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

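# Illustrative sketch (not part of the original module): units are binary
# (1 KB here means 1024 bytes) and the precision shrinks as the value grows.
#
#   >>> bytecount(1024)
#   '1.00 KB'
#   >>> bytecount(12345678)
#   '11.8 MB'
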
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            if scheme == 'file':
                i = path.find('/', 2)
                if i == -1:
                    return ''
                # On Windows, absolute paths are rooted at the current drive
                # root. On POSIX they are rooted at the file system root.
                if os.name == 'nt':
                    droot = os.path.splitdrive(os.getcwd())[0] + '/'
                    path = os.path.join(droot, path[i + 1:])
                else:
                    path = path[i:]
            else:
                path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

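# Illustrative sketch (not part of the original module): stripping a 'file:'
# scheme on a POSIX system (Windows re-roots the path at the current drive).
#
#   >>> drop_scheme('file', 'file:///tmp/repo')
#   '/tmp/repo'
#   >>> drop_scheme('file', 'no-scheme/here')
#   'no-scheme/here'
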
#### naming convention of below implementation follows 'textwrap' module

class MBTextWrapper(textwrap.TextWrapper):
    def __init__(self, **kwargs):
        textwrap.TextWrapper.__init__(self, **kwargs)

    def _cutdown(self, str, space_left):
        l = 0
        ucstr = unicode(str, encoding.encoding)
        w = unicodedata.east_asian_width
        for i in xrange(len(ucstr)):
            l += w(ucstr[i]) in 'WFA' and 2 or 1
            if space_left < l:
                return (ucstr[:i].encode(encoding.encoding),
                        ucstr[i:].encode(encoding.encoding))
        return str, ''

    # ----------------------------------------
    # overriding of base class

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        space_left = max(width - cur_len, 1)

        if self.break_long_words:
            cut, res = self._cutdown(reversed_chunks[-1], space_left)
            cur_line.append(cut)
            reversed_chunks[-1] = res
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

#### naming convention of above implementation follows 'textwrap' module

def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

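# Illustrative sketch (not part of the original module): wrapping with a bullet
# on the first line and a matching hanging indent on the rest.
#
#   >>> wrap('aa bb cc dd', 8, initindent='- ', hangindent='  ')
#   '- aa bb\n  cc dd'
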
def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def interpolate(prefix, mapping, s, fn=None):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.
    """
    fn = fn or (lambda s: s)
    r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys())))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

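# Illustrative sketch (not part of the original module): '$' is a regexp
# metacharacter, so it is passed pre-escaped as described in the docstring.
#
#   >>> interpolate(r'\$', {'user': 'alice'}, 'hello $user')
#   'hello alice'
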
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)

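# Illustrative sketch (not part of the original module): numeric strings pass
# straight through; service names depend on the local services database.
#
#   >>> getport(8080)
#   8080
#   >>> getport('8080')
#   8080
#   >>> getport('http')      # usually 80, if the 'http' service is defined
#   80
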
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
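
# Illustrative sketch (not part of the original module): matching is
# case-insensitive, and anything unrecognized maps to None.
#
#   >>> parsebool('Yes')
#   True
#   >>> parsebool('off')
#   False
#   >>> parsebool('maybe') is None
#   True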