date: fix matching of underspecified date ranges...
Matt Mackall
r13212:5d0a30fa stable
@@ -1,1499 +1,1527 @@
# util.py - Mercurial utility functions and platform specific implementations
#
# Copyright 2005 K. Thananchayan <thananck@yahoo.com>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

"""Mercurial utility functions and platform specific implementations.

This contains helper routines that are independent of the SCM core and
hide platform-specific details from the core.
"""

from i18n import _
import error, osutil, encoding
import errno, re, shutil, sys, tempfile, traceback
import os, stat, time, calendar, textwrap, unicodedata, signal
import imp, socket

# Python compatibility

def sha1(s):
    return _fastsha1(s)

def _fastsha1(s):
    # This function will import sha1 from hashlib or sha (whichever is
    # available) and overwrite itself with it on the first call.
    # Subsequent calls will go directly to the imported function.
    if sys.version_info >= (2, 5):
        from hashlib import sha1 as _sha1
    else:
        from sha import sha as _sha1
    global _fastsha1, sha1
    _fastsha1 = sha1 = _sha1
    return _sha1(s)

import __builtin__

if sys.version_info[0] < 3:
    def fakebuffer(sliceable, offset=0):
        return sliceable[offset:]
else:
    def fakebuffer(sliceable, offset=0):
        return memoryview(sliceable)[offset:]
try:
    buffer
except NameError:
    __builtin__.buffer = fakebuffer

import subprocess
closefds = os.name == 'posix'

def popen2(cmd, env=None, newlines=False):
    # Setting bufsize to -1 lets the system decide the buffer size.
    # The default for bufsize is 0, meaning unbuffered. This leads to
    # poor performance on Mac OS X: http://bugs.python.org/issue4194
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         universal_newlines=newlines,
                         env=env)
    return p.stdin, p.stdout

def popen3(cmd, env=None, newlines=False):
    p = subprocess.Popen(cmd, shell=True, bufsize=-1,
                         close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         universal_newlines=newlines,
                         env=env)
    return p.stdin, p.stdout, p.stderr

def version():
    """Return version information if available."""
    try:
        import __version__
        return __version__.version
    except ImportError:
        return 'unknown'

# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
    )

extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )

def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keyword args
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f

def lrucachefunc(func):
    '''cache most recent results of function calls'''
    cache = {}
    order = []
    if func.func_code.co_argcount == 1:
        def f(arg):
            if arg not in cache:
                if len(cache) > 20:
                    del cache[order.pop(0)]
                cache[arg] = func(arg)
            else:
                order.remove(arg)
            order.append(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                if len(cache) > 20:
                    del cache[order.pop(0)]
                cache[args] = func(*args)
            else:
                order.remove(args)
            order.append(args)
            return cache[args]

    return f

class propertycache(object):
    def __init__(self, func):
        self.func = func
        self.name = func.__name__
    def __get__(self, obj, type=None):
        result = self.func(obj)
        setattr(obj, self.name, result)
        return result

def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                         stdin=subprocess.PIPE, stdout=subprocess.PIPE)
    pout, perr = p.communicate(s)
    return pout

def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code:
            raise Abort(_("command '%s' failed: %s") %
                        (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        try:
            if inname:
                os.unlink(inname)
        except:
            pass
        try:
            if outname:
                os.unlink(outname)
        except:
            pass

filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)

def binary(s):
    """return true if a string is binary data"""
    return bool(s and '\0' in s)

def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield ''.join(buf)

Abort = error.Abort

def always(fn):
    return True

def never(fn):
    return False

def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1:
        return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'

def canonpath(root, cwd, myname, auditor=None):
    """return the canonical path of myname, given cwd and root"""
    if endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    if auditor is None:
        auditor = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        name = name[len(rootsep):]
        auditor(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                auditor(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)

_hgexecutable = None

def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (hasattr(sys, "frozen") or # new py2exe
            hasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        if hg:
            set_hgexecutable(hg)
        elif main_is_frozen():
            set_hgexecutable(sys.executable)
        else:
            exe = find_exe('hg') or os.path.basename(sys.argv[0])
            set_hgexecutable(exe)
    return _hgexecutable

def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path

def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val is None or val is False:
            return '0'
        if val is True:
            return '1'
        return str(val)
    origcmd = cmd
    cmd = quotecommand(cmd)
    env = dict(os.environ)
    env.update((k, py2shell(v)) for k, v in environ.iteritems())
    env['HG'] = hgexecutable()
    if out is None:
        rc = subprocess.call(cmd, shell=True, close_fds=closefds,
                             env=env, cwd=cwd)
    else:
        proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
                                env=env, cwd=cwd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT)
        for line in proc.stdout:
            out.write(line)
        proc.wait()
        rc = proc.returncode
    if sys.platform == 'OpenVMS' and rc & 1:
        rc = 0
    if rc and onerr:
        errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                            explain_exit(rc)[0])
        if errprefix:
            errmsg = '%s: %s' % (errprefix, errmsg)
        try:
            onerr.warn(errmsg + '\n')
        except AttributeError:
            raise onerr(errmsg)
    return rc

def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass

def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copystat(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))

def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    num = 0
    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            hardlink, n = copyfiles(srcname, dstname, hardlink)
            num += n
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)
        num += 1

    return hardlink, num

class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository (a callback can be used to approve
      some nested repositories, e.g., subrepositories)
    '''

    def __init__(self, root, callback=None):
        self.audited = set()
        self.auditeddir = set()
        self.root = root
        self.callback = callback

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0]
            or parts[0].lower() in ('.hg', '.hg.', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        if '.hg' in path.lower():
            lparts = [p.lower() for p in parts]
            for p in '.hg', '.hg.':
                if p in lparts[1:]:
                    pos = lparts.index(p)
                    base = os.path.join(*parts[:pos])
                    raise Abort(_('path %r is inside repo %r') % (path, base))
        def check(prefix):
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # They must be ignored for patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    if not self.callback or not self.callback(curpath):
                        raise Abort(_('path %r is inside repo %r') %
                                    (path, prefix))
        parts.pop()
        prefixes = []
        while parts:
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))

def lookup_reg(key, name=None, scope=None):
    return None

def hidewindow():
    """Hide current shell window.

    Used to hide the window opened when starting asynchronous
    child process under Windows, unneeded on other systems.
    """
    pass

if os.name == 'nt':
    from windows import *
else:
    from posix import *

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError, why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    return posixfile(pathname).read()

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            return False
        return True
    except:
        return True

_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).
    '''
    # If name is absolute, make it relative
    if name.lower().startswith(root.lower()):
        l = len(root)
        if name[l] == os.sep or name[l] == os.altsep:
            l = l + 1
        name = name[l:]

    if not os.path.lexists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    seps.replace('\\','\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normcase(os.path.normpath(root))
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        lpart = part.lower()
        lenp = len(part)
        for n in contents:
            if lenp == len(n) and n.lower() == lpart:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, lpart)

    return ''.join(result)

def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)

def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        return False

def checknlink(testfile):
    '''check whether hardlink count reporting works properly'''

    # testfile may be open, so we need a separate file for checking to
    # work around issue2543 (or testfile may get lost on Samba shares)
    f1 = testfile + ".hgtmp1"
    if os.path.lexists(f1):
        return False
    try:
        posixfile(f1, 'w').close()
    except IOError:
        return False

    f2 = testfile + ".hgtmp2"
    fd = None
    try:
        try:
            os_link(f1, f2)
        except OSError:
            return False

        # nlinks() may behave differently for files on Windows shares if
        # the file is open.
        fd = open(f2)
        return nlinks(f2) > 1
    finally:
        if fd is not None:
            fd.close()
        for f in (f1, f2):
            try:
                os.unlink(f)
            except OSError:
                pass

    return False

def endswithsep(path):
    '''Check path ends with os.sep or os.altsep.'''
    return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because this is
    an alternative of simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)

def gui():
    '''Are we running in a GUI?'''
    return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents from name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        try: os.unlink(temp)
        except: pass
        raise
    return temp

class atomictempfile(object):
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode='w+b', createmode=None):
        self.__name = name
        self._fp = None
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        self._fp = posixfile(self.temp, mode)

    def __getattr__(self, name):
        return getattr(self._fp, name)

    def rename(self):
        if not self._fp.closed:
            self._fp.close()
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self._fp:
            return
        if not self._fp.closed:
            try:
                os.unlink(self.temp)
            except: pass
            self._fp.close()

def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    parent = os.path.abspath(os.path.dirname(name))
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if not name or parent == name or err.errno != errno.ENOENT:
            raise
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.auditor = path_auditor(base)
        else:
            self.auditor = always
        self.createmode = None
        self._trustnlink = None

    @propertycache
    def _can_symlink(self):
        return checklink(self.base)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.auditor(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        st_mode = None
        dirname, basename = os.path.split(f)
        # If basename is empty, then the path is malformed because it points
        # to a directory. Let the posixfile() call below raise IOError.
        if basename and mode not in ('r', 'rb'):
            if atomictemp:
                if not os.path.isdir(dirname):
                    makedirs(dirname, self.createmode)
                return atomictempfile(f, mode, self.createmode)
            try:
                if 'w' in mode:
                    st_mode = os.lstat(f).st_mode & 0777
                    os.unlink(f)
                    nlink = 0
                else:
                    # nlinks() may behave differently for files on Windows
                    # shares if the file is open.
                    fd = open(f)
                    nlink = nlinks(f)
                    fd.close()
            except (OSError, IOError):
                nlink = 0
                if not os.path.isdir(dirname):
                    makedirs(dirname, self.createmode)
            if nlink > 0:
                if self._trustnlink is None:
                    self._trustnlink = nlink > 1 or checknlink(f)
                if nlink > 1 or not self._trustnlink:
                    rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            if st_mode is None:
                self._fixfilemode(f)
            else:
                os.chmod(f, st_mode)
        return fp

    def symlink(self, src, dst):
        self.auditor(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)

class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        def splitbig(chunks):
            for chunk in chunks:
                if len(chunk) > 2**20:
                    pos = 0
                    while pos < len(chunk):
                        end = pos + 2 ** 18
                        yield chunk[pos:end]
                        pos = end
                else:
                    yield chunk
        self.iter = splitbig(in_iter)
        self._queue = []

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        left = l
        buf = ''
        queue = self._queue
        while left > 0:
            # refill the queue
            if not queue:
                target = 2**18
                for chunk in self.iter:
                    queue.append(chunk)
                    target -= len(chunk)
                    if target <= 0:
                        break
                if not queue:
                    break

            chunk = queue.pop(0)
            left -= len(chunk)
            if left < 0:
                queue.insert(0, chunk[left:])
                buf += chunk[:left]
            else:
                buf += chunk

        return buf

1006 def filechunkiter(f, size=65536, limit=None):
1006 def filechunkiter(f, size=65536, limit=None):
1007 """Create a generator that produces the data in the file size
1007 """Create a generator that produces the data in the file size
1008 (default 65536) bytes at a time, up to optional limit (default is
1008 (default 65536) bytes at a time, up to optional limit (default is
1009 to read all data). Chunks may be less than size bytes if the
1009 to read all data). Chunks may be less than size bytes if the
1010 chunk is the last chunk in the file, or the file is a socket or
1010 chunk is the last chunk in the file, or the file is a socket or
1011 some other type of file that sometimes reads less data than is
1011 some other type of file that sometimes reads less data than is
1012 requested."""
1012 requested."""
1013 assert size >= 0
1013 assert size >= 0
1014 assert limit is None or limit >= 0
1014 assert limit is None or limit >= 0
1015 while True:
1015 while True:
1016 if limit is None:
1016 if limit is None:
1017 nbytes = size
1017 nbytes = size
1018 else:
1018 else:
1019 nbytes = min(limit, size)
1019 nbytes = min(limit, size)
1020 s = nbytes and f.read(nbytes)
1020 s = nbytes and f.read(nbytes)
1021 if not s:
1021 if not s:
1022 break
1022 break
1023 if limit:
1023 if limit:
1024 limit -= len(s)
1024 limit -= len(s)
1025 yield s
1025 yield s
1026
1026
1027 def makedate():
1027 def makedate():
1028 lt = time.localtime()
1028 lt = time.localtime()
1029 if lt[8] == 1 and time.daylight:
1029 if lt[8] == 1 and time.daylight:
1030 tz = time.altzone
1030 tz = time.altzone
1031 else:
1031 else:
1032 tz = time.timezone
1032 tz = time.timezone
1033 t = time.mktime(lt)
1033 t = time.mktime(lt)
1034 if t < 0:
1034 if t < 0:
1035 hint = _("check your clock")
1035 hint = _("check your clock")
1036 raise Abort(_("negative timestamp: %d") % t, hint=hint)
1036 raise Abort(_("negative timestamp: %d") % t, hint=hint)
1037 return t, tz
1037 return t, tz
1038
1038
1039 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1039 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1040 """represent a (unixtime, offset) tuple as a localized time.
1040 """represent a (unixtime, offset) tuple as a localized time.
1041 unixtime is seconds since the epoch, and offset is the time zone's
1041 unixtime is seconds since the epoch, and offset is the time zone's
1042 number of seconds away from UTC. "%1" in the format is replaced with
1042 number of seconds away from UTC. "%1" in the format is replaced with
1043 the offset's sign and hours, and "%2" with its minutes."""
1043 the offset's sign and hours, and "%2" with its minutes."""
1044 t, tz = date or makedate()
1044 t, tz = date or makedate()
1045 if t < 0:
1045 if t < 0:
1046 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1046 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1047 tz = 0
1047 tz = 0
1048 if "%1" in format or "%2" in format:
1048 if "%1" in format or "%2" in format:
1049 sign = (tz > 0) and "-" or "+"
1049 sign = (tz > 0) and "-" or "+"
1050 minutes = abs(tz) // 60
1050 minutes = abs(tz) // 60
1051 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1051 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1052 format = format.replace("%2", "%02d" % (minutes % 60))
1052 format = format.replace("%2", "%02d" % (minutes % 60))
1053 s = time.strftime(format, time.gmtime(float(t) - tz))
1053 s = time.strftime(format, time.gmtime(float(t) - tz))
1054 return s
1054 return s
1055
1055
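# Illustrative calls for makedate() and datestr() above. The epoch example is
# fully determined by the code; the second line depends on the local clock.
from mercurial import util

assert util.datestr((0, 0)) == 'Thu Jan 01 00:00:00 1970 +0000'
now = util.makedate()                            # (unixtime, offset from UTC)
stamp = util.datestr(now, '%Y-%m-%d %H:%M:%S %1%2')
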
1056 def shortdate(date=None):
1056 def shortdate(date=None):
1057 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1057 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1058 return datestr(date, format='%Y-%m-%d')
1058 return datestr(date, format='%Y-%m-%d')
1059
1059
1060 def strdate(string, format, defaults=[]):
1060 def strdate(string, format, defaults=[]):
1061 """parse a localized time string and return a (unixtime, offset) tuple.
1061 """parse a localized time string and return a (unixtime, offset) tuple.
1062 if the string cannot be parsed, ValueError is raised."""
1062 if the string cannot be parsed, ValueError is raised."""
1063 def timezone(string):
1063 def timezone(string):
1064 tz = string.split()[-1]
1064 tz = string.split()[-1]
1065 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1065 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1066 sign = (tz[0] == "+") and 1 or -1
1066 sign = (tz[0] == "+") and 1 or -1
1067 hours = int(tz[1:3])
1067 hours = int(tz[1:3])
1068 minutes = int(tz[3:5])
1068 minutes = int(tz[3:5])
1069 return -sign * (hours * 60 + minutes) * 60
1069 return -sign * (hours * 60 + minutes) * 60
1070 if tz == "GMT" or tz == "UTC":
1070 if tz == "GMT" or tz == "UTC":
1071 return 0
1071 return 0
1072 return None
1072 return None
1073
1073
1074 # NOTE: unixtime = localunixtime + offset
1074 # NOTE: unixtime = localunixtime + offset
1075 offset, date = timezone(string), string
1075 offset, date = timezone(string), string
1076 if offset is not None:
1076 if offset is not None:
1077 date = " ".join(string.split()[:-1])
1077 date = " ".join(string.split()[:-1])
1078
1078
1079 # add missing elements from defaults
1079 # add missing elements from defaults
1080 for part in defaults:
1080 usenow = False # default to using biased defaults
1081 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1081 found = [True for p in part if ("%"+p) in format]
1082 found = [True for p in part if ("%"+p) in format]
1082 if not found:
1083 if not found:
1083 date += "@" + defaults[part]
1084 date += "@" + defaults[part][usenow]
1084 format += "@%" + part[0]
1085 format += "@%" + part[0]
1086 else:
1087 # We've found a specific time element, less specific time
1088 # elements are relative to today
1089 usenow = True
1085
1090
1086 timetuple = time.strptime(date, format)
1091 timetuple = time.strptime(date, format)
1087 localunixtime = int(calendar.timegm(timetuple))
1092 localunixtime = int(calendar.timegm(timetuple))
1088 if offset is None:
1093 if offset is None:
1089 # local timezone
1094 # local timezone
1090 unixtime = int(time.mktime(timetuple))
1095 unixtime = int(time.mktime(timetuple))
1091 offset = unixtime - localunixtime
1096 offset = unixtime - localunixtime
1092 else:
1097 else:
1093 unixtime = localunixtime + offset
1098 unixtime = localunixtime + offset
1094 return unixtime, offset
1099 return unixtime, offset
1095
1100
1096 def parsedate(date, formats=None, defaults=None):
1101 def parsedate(date, formats=None, bias={}):
1097 """parse a localized date/time string and return a (unixtime, offset) tuple.
1102 """parse a localized date/time and return a (unixtime, offset) tuple.
1098
1103
1099 The date may be a "unixtime offset" string or in one of the specified
1104 The date may be a "unixtime offset" string or in one of the specified
1100 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1105 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1101 """
1106 """
1102 if not date:
1107 if not date:
1103 return 0, 0
1108 return 0, 0
1104 if isinstance(date, tuple) and len(date) == 2:
1109 if isinstance(date, tuple) and len(date) == 2:
1105 return date
1110 return date
1106 if not formats:
1111 if not formats:
1107 formats = defaultdateformats
1112 formats = defaultdateformats
1108 date = date.strip()
1113 date = date.strip()
1109 try:
1114 try:
1110 when, offset = map(int, date.split(' '))
1115 when, offset = map(int, date.split(' '))
1111 except ValueError:
1116 except ValueError:
1112 # fill out defaults
1117 # fill out defaults
1113 if not defaults:
1114 defaults = {}
1115 now = makedate()
1118 now = makedate()
1119 defaults = {}
1120 nowmap = {}
1116 for part in "d mb yY HI M S".split():
1121 for part in "d mb yY HI M S".split():
1117 if part not in defaults:
1122 # this piece is for rounding the specific end of unknowns
1123 b = bias.get(part)
1124 if b is None:
1118 if part[0] in "HMS":
1125 if part[0] in "HMS":
1119 defaults[part] = "00"
1126 b = "00"
1120 else:
1127 else:
1121 defaults[part] = datestr(now, "%" + part[0])
1128 b = "0"
1129
1130 # this piece is for matching the generic end to today's date
1131 n = datestr(now, "%" + part[0])
1132
1133 defaults[part] = (b, n)
1122
1134
1123 for format in formats:
1135 for format in formats:
1124 try:
1136 try:
1125 when, offset = strdate(date, format, defaults)
1137 when, offset = strdate(date, format, defaults)
1126 except (ValueError, OverflowError):
1138 except (ValueError, OverflowError):
1127 pass
1139 pass
1128 else:
1140 else:
1129 break
1141 break
1130 else:
1142 else:
1131 raise Abort(_('invalid date: %r') % date)
1143 raise Abort(_('invalid date: %r') % date)
1132 # validate explicit (probably user-specified) date and
1144 # validate explicit (probably user-specified) date and
1133 # time zone offset. values must fit in signed 32 bits for
1145 # time zone offset. values must fit in signed 32 bits for
1134 # current 32-bit linux runtimes. timezones go from UTC-12
1146 # current 32-bit linux runtimes. timezones go from UTC-12
1135 # to UTC+14
1147 # to UTC+14
1136 if abs(when) > 0x7fffffff:
1148 if abs(when) > 0x7fffffff:
1137 raise Abort(_('date exceeds 32 bits: %d') % when)
1149 raise Abort(_('date exceeds 32 bits: %d') % when)
1138 if when < 0:
1150 if when < 0:
1139 raise Abort(_('negative date value: %d') % when)
1151 raise Abort(_('negative date value: %d') % when)
1140 if offset < -50400 or offset > 43200:
1152 if offset < -50400 or offset > 43200:
1141 raise Abort(_('impossible time zone offset: %d') % offset)
1153 raise Abort(_('impossible time zone offset: %d') % offset)
1142 return when, offset
1154 return when, offset
1143
1155
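# Hedged examples for parsedate(). The "unixtime offset" form is parsed
# directly, tuples pass through unchanged, and other strings are tried
# against defaultdateformats via strdate(). The bias mapping is what lets
# matchdate() below round unspecified fields toward either end of a range.
from mercurial import util

assert util.parsedate('0 0') == (0, 0)                      # raw unixtime form
assert util.parsedate((1234567890, 0)) == (1234567890, 0)   # tuple passthrough
when, offset = util.parsedate('2006-02-01 13:00:30 -0500')  # formatted string
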
1144 def matchdate(date):
1156 def matchdate(date):
1145 """Return a function that matches a given date match specifier
1157 """Return a function that matches a given date match specifier
1146
1158
1147 Formats include:
1159 Formats include:
1148
1160
1149 '{date}' match a given date to the accuracy provided
1161 '{date}' match a given date to the accuracy provided
1150
1162
1151 '<{date}' on or before a given date
1163 '<{date}' on or before a given date
1152
1164
1153 '>{date}' on or after a given date
1165 '>{date}' on or after a given date
1154
1166
1167 >>> p1 = parsedate("10:29:59")
1168 >>> p2 = parsedate("10:30:00")
1169 >>> p3 = parsedate("10:30:59")
1170 >>> p4 = parsedate("10:31:00")
1171 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1172 >>> f = matchdate("10:30")
1173 >>> f(p1[0])
1174 False
1175 >>> f(p2[0])
1176 True
1177 >>> f(p3[0])
1178 True
1179 >>> f(p4[0])
1180 False
1181 >>> f(p5[0])
1182 False
1155 """
1183 """
1156
1184
1157 def lower(date):
1185 def lower(date):
1158 d = dict(mb="1", d="1")
1186 d = dict(mb="1", d="1")
1159 return parsedate(date, extendeddateformats, d)[0]
1187 return parsedate(date, extendeddateformats, d)[0]
1160
1188
1161 def upper(date):
1189 def upper(date):
1162 d = dict(mb="12", HI="23", M="59", S="59")
1190 d = dict(mb="12", HI="23", M="59", S="59")
1163 for days in "31 30 29".split():
1191 for days in "31 30 29".split():
1164 try:
1192 try:
1165 d["d"] = days
1193 d["d"] = days
1166 return parsedate(date, extendeddateformats, d)[0]
1194 return parsedate(date, extendeddateformats, d)[0]
1167 except:
1195 except:
1168 pass
1196 pass
1169 d["d"] = "28"
1197 d["d"] = "28"
1170 return parsedate(date, extendeddateformats, d)[0]
1198 return parsedate(date, extendeddateformats, d)[0]
1171
1199
1172 date = date.strip()
1200 date = date.strip()
1173 if date[0] == "<":
1201 if date[0] == "<":
1174 when = upper(date[1:])
1202 when = upper(date[1:])
1175 return lambda x: x <= when
1203 return lambda x: x <= when
1176 elif date[0] == ">":
1204 elif date[0] == ">":
1177 when = lower(date[1:])
1205 when = lower(date[1:])
1178 return lambda x: x >= when
1206 return lambda x: x >= when
1179 elif date[0] == "-":
1207 elif date[0] == "-":
1180 try:
1208 try:
1181 days = int(date[1:])
1209 days = int(date[1:])
1182 except ValueError:
1210 except ValueError:
1183 raise Abort(_("invalid day spec: %s") % date[1:])
1211 raise Abort(_("invalid day spec: %s") % date[1:])
1184 when = makedate()[0] - days * 3600 * 24
1212 when = makedate()[0] - days * 3600 * 24
1185 return lambda x: x >= when
1213 return lambda x: x >= when
1186 elif " to " in date:
1214 elif " to " in date:
1187 a, b = date.split(" to ")
1215 a, b = date.split(" to ")
1188 start, stop = lower(a), upper(b)
1216 start, stop = lower(a), upper(b)
1189 return lambda x: x >= start and x <= stop
1217 return lambda x: x >= start and x <= stop
1190 else:
1218 else:
1191 start, stop = lower(date), upper(date)
1219 start, stop = lower(date), upper(date)
1192 return lambda x: x >= start and x <= stop
1220 return lambda x: x >= start and x <= stop
1193
1221
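# A short sketch of matchdate() beyond the doctests above: the "-{days}"
# spec matches anything newer than the given number of days.
from mercurial import util

recent = util.matchdate('-30')        # within the last 30 days
assert recent(util.makedate()[0])     # "now" qualifies
assert not recent(0)                  # the epoch does not
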
1194 def shortuser(user):
1222 def shortuser(user):
1195 """Return a short representation of a user name or email address."""
1223 """Return a short representation of a user name or email address."""
1196 f = user.find('@')
1224 f = user.find('@')
1197 if f >= 0:
1225 if f >= 0:
1198 user = user[:f]
1226 user = user[:f]
1199 f = user.find('<')
1227 f = user.find('<')
1200 if f >= 0:
1228 if f >= 0:
1201 user = user[f + 1:]
1229 user = user[f + 1:]
1202 f = user.find(' ')
1230 f = user.find(' ')
1203 if f >= 0:
1231 if f >= 0:
1204 user = user[:f]
1232 user = user[:f]
1205 f = user.find('.')
1233 f = user.find('.')
1206 if f >= 0:
1234 if f >= 0:
1207 user = user[:f]
1235 user = user[:f]
1208 return user
1236 return user
1209
1237
1210 def email(author):
1238 def email(author):
1211 '''get email of author.'''
1239 '''get email of author.'''
1212 r = author.find('>')
1240 r = author.find('>')
1213 if r == -1:
1241 if r == -1:
1214 r = None
1242 r = None
1215 return author[author.find('<') + 1:r]
1243 return author[author.find('<') + 1:r]
1216
1244
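# Worked examples for shortuser() and email(), using a made-up author string.
from mercurial import util

author = 'John Doe <john.doe@example.com>'
assert util.shortuser(author) == 'john'
assert util.email(author) == 'john.doe@example.com'
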
1217 def ellipsis(text, maxlength=400):
1245 def ellipsis(text, maxlength=400):
1218 """Trim string to at most maxlength (default: 400) characters."""
1246 """Trim string to at most maxlength (default: 400) characters."""
1219 if len(text) <= maxlength:
1247 if len(text) <= maxlength:
1220 return text
1248 return text
1221 else:
1249 else:
1222 return "%s..." % (text[:maxlength - 3])
1250 return "%s..." % (text[:maxlength - 3])
1223
1251
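# ellipsis() keeps the string whole if it fits, otherwise trims it so the
# result, including the "...", is exactly maxlength characters.
from mercurial import util

assert util.ellipsis('short text', 400) == 'short text'
assert util.ellipsis('changeset description that is too long', 20) == \
       'changeset descrip...'
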
1224 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1252 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1225 '''yield every hg repository under path, recursively.'''
1253 '''yield every hg repository under path, recursively.'''
1226 def errhandler(err):
1254 def errhandler(err):
1227 if err.filename == path:
1255 if err.filename == path:
1228 raise err
1256 raise err
1229 if followsym and hasattr(os.path, 'samestat'):
1257 if followsym and hasattr(os.path, 'samestat'):
1230 def _add_dir_if_not_there(dirlst, dirname):
1258 def _add_dir_if_not_there(dirlst, dirname):
1231 match = False
1259 match = False
1232 samestat = os.path.samestat
1260 samestat = os.path.samestat
1233 dirstat = os.stat(dirname)
1261 dirstat = os.stat(dirname)
1234 for lstdirstat in dirlst:
1262 for lstdirstat in dirlst:
1235 if samestat(dirstat, lstdirstat):
1263 if samestat(dirstat, lstdirstat):
1236 match = True
1264 match = True
1237 break
1265 break
1238 if not match:
1266 if not match:
1239 dirlst.append(dirstat)
1267 dirlst.append(dirstat)
1240 return not match
1268 return not match
1241 else:
1269 else:
1242 followsym = False
1270 followsym = False
1243
1271
1244 if (seen_dirs is None) and followsym:
1272 if (seen_dirs is None) and followsym:
1245 seen_dirs = []
1273 seen_dirs = []
1246 _add_dir_if_not_there(seen_dirs, path)
1274 _add_dir_if_not_there(seen_dirs, path)
1247 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1275 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1248 dirs.sort()
1276 dirs.sort()
1249 if '.hg' in dirs:
1277 if '.hg' in dirs:
1250 yield root # found a repository
1278 yield root # found a repository
1251 qroot = os.path.join(root, '.hg', 'patches')
1279 qroot = os.path.join(root, '.hg', 'patches')
1252 if os.path.isdir(os.path.join(qroot, '.hg')):
1280 if os.path.isdir(os.path.join(qroot, '.hg')):
1253 yield qroot # we have a patch queue repo here
1281 yield qroot # we have a patch queue repo here
1254 if recurse:
1282 if recurse:
1255 # avoid recursing inside the .hg directory
1283 # avoid recursing inside the .hg directory
1256 dirs.remove('.hg')
1284 dirs.remove('.hg')
1257 else:
1285 else:
1258 dirs[:] = [] # don't descend further
1286 dirs[:] = [] # don't descend further
1259 elif followsym:
1287 elif followsym:
1260 newdirs = []
1288 newdirs = []
1261 for d in dirs:
1289 for d in dirs:
1262 fname = os.path.join(root, d)
1290 fname = os.path.join(root, d)
1263 if _add_dir_if_not_there(seen_dirs, fname):
1291 if _add_dir_if_not_there(seen_dirs, fname):
1264 if os.path.islink(fname):
1292 if os.path.islink(fname):
1265 for hgname in walkrepos(fname, True, seen_dirs):
1293 for hgname in walkrepos(fname, True, seen_dirs):
1266 yield hgname
1294 yield hgname
1267 else:
1295 else:
1268 newdirs.append(d)
1296 newdirs.append(d)
1269 dirs[:] = newdirs
1297 dirs[:] = newdirs
1270
1298
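# A hedged usage sketch for walkrepos(); the starting path is hypothetical.
# It yields every .hg repository (and any MQ patch-queue repository) found
# under the given directory.
import os
from mercurial import util

root = os.path.expanduser('~/src')
repos = list(util.walkrepos(root, followsym=True, recurse=True))
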
1271 _rcpath = None
1299 _rcpath = None
1272
1300
1273 def os_rcpath():
1301 def os_rcpath():
1274 '''return default os-specific hgrc search path'''
1302 '''return default os-specific hgrc search path'''
1275 path = system_rcpath()
1303 path = system_rcpath()
1276 path.extend(user_rcpath())
1304 path.extend(user_rcpath())
1277 path = [os.path.normpath(f) for f in path]
1305 path = [os.path.normpath(f) for f in path]
1278 return path
1306 return path
1279
1307
1280 def rcpath():
1308 def rcpath():
1281 '''return hgrc search path. if env var HGRCPATH is set, use it.
1309 '''return hgrc search path. if env var HGRCPATH is set, use it.
1282 for each item in path, if directory, use files ending in .rc,
1310 for each item in path, if directory, use files ending in .rc,
1283 else use item.
1311 else use item.
1284 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1312 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1285 if no HGRCPATH, use default os-specific path.'''
1313 if no HGRCPATH, use default os-specific path.'''
1286 global _rcpath
1314 global _rcpath
1287 if _rcpath is None:
1315 if _rcpath is None:
1288 if 'HGRCPATH' in os.environ:
1316 if 'HGRCPATH' in os.environ:
1289 _rcpath = []
1317 _rcpath = []
1290 for p in os.environ['HGRCPATH'].split(os.pathsep):
1318 for p in os.environ['HGRCPATH'].split(os.pathsep):
1291 if not p:
1319 if not p:
1292 continue
1320 continue
1293 p = expandpath(p)
1321 p = expandpath(p)
1294 if os.path.isdir(p):
1322 if os.path.isdir(p):
1295 for f, kind in osutil.listdir(p):
1323 for f, kind in osutil.listdir(p):
1296 if f.endswith('.rc'):
1324 if f.endswith('.rc'):
1297 _rcpath.append(os.path.join(p, f))
1325 _rcpath.append(os.path.join(p, f))
1298 else:
1326 else:
1299 _rcpath.append(p)
1327 _rcpath.append(p)
1300 else:
1328 else:
1301 _rcpath = os_rcpath()
1329 _rcpath = os_rcpath()
1302 return _rcpath
1330 return _rcpath
1303
1331
1304 def bytecount(nbytes):
1332 def bytecount(nbytes):
1305 '''return byte count formatted as readable string, with units'''
1333 '''return byte count formatted as readable string, with units'''
1306
1334
1307 units = (
1335 units = (
1308 (100, 1 << 30, _('%.0f GB')),
1336 (100, 1 << 30, _('%.0f GB')),
1309 (10, 1 << 30, _('%.1f GB')),
1337 (10, 1 << 30, _('%.1f GB')),
1310 (1, 1 << 30, _('%.2f GB')),
1338 (1, 1 << 30, _('%.2f GB')),
1311 (100, 1 << 20, _('%.0f MB')),
1339 (100, 1 << 20, _('%.0f MB')),
1312 (10, 1 << 20, _('%.1f MB')),
1340 (10, 1 << 20, _('%.1f MB')),
1313 (1, 1 << 20, _('%.2f MB')),
1341 (1, 1 << 20, _('%.2f MB')),
1314 (100, 1 << 10, _('%.0f KB')),
1342 (100, 1 << 10, _('%.0f KB')),
1315 (10, 1 << 10, _('%.1f KB')),
1343 (10, 1 << 10, _('%.1f KB')),
1316 (1, 1 << 10, _('%.2f KB')),
1344 (1, 1 << 10, _('%.2f KB')),
1317 (1, 1, _('%.0f bytes')),
1345 (1, 1, _('%.0f bytes')),
1318 )
1346 )
1319
1347
1320 for multiplier, divisor, format in units:
1348 for multiplier, divisor, format in units:
1321 if nbytes >= divisor * multiplier:
1349 if nbytes >= divisor * multiplier:
1322 return format % (nbytes / float(divisor))
1350 return format % (nbytes / float(divisor))
1323 return units[-1][2] % nbytes
1351 return units[-1][2] % nbytes
1324
1352
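# bytecount() picks the first unit bucket that the value fills completely.
from mercurial import util

assert util.bytecount(999) == '999 bytes'
assert util.bytecount(2048) == '2.00 KB'
assert util.bytecount(1234567) == '1.18 MB'
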
1325 def drop_scheme(scheme, path):
1353 def drop_scheme(scheme, path):
1326 sc = scheme + ':'
1354 sc = scheme + ':'
1327 if path.startswith(sc):
1355 if path.startswith(sc):
1328 path = path[len(sc):]
1356 path = path[len(sc):]
1329 if path.startswith('//'):
1357 if path.startswith('//'):
1330 if scheme == 'file':
1358 if scheme == 'file':
1331 i = path.find('/', 2)
1359 i = path.find('/', 2)
1332 if i == -1:
1360 if i == -1:
1333 return ''
1361 return ''
1334 # On Windows, absolute paths are rooted at the current drive
1362 # On Windows, absolute paths are rooted at the current drive
1335 # root. On POSIX they are rooted at the file system root.
1363 # root. On POSIX they are rooted at the file system root.
1336 if os.name == 'nt':
1364 if os.name == 'nt':
1337 droot = os.path.splitdrive(os.getcwd())[0] + '/'
1365 droot = os.path.splitdrive(os.getcwd())[0] + '/'
1338 path = os.path.join(droot, path[i + 1:])
1366 path = os.path.join(droot, path[i + 1:])
1339 else:
1367 else:
1340 path = path[i:]
1368 path = path[i:]
1341 else:
1369 else:
1342 path = path[2:]
1370 path = path[2:]
1343 return path
1371 return path
1344
1372
1345 def uirepr(s):
1373 def uirepr(s):
1346 # Avoid double backslash in Windows path repr()
1374 # Avoid double backslash in Windows path repr()
1347 return repr(s).replace('\\\\', '\\')
1375 return repr(s).replace('\\\\', '\\')
1348
1376
1349 #### naming convention of below implementation follows 'textwrap' module
1377 #### naming convention of below implementation follows 'textwrap' module
1350
1378
1351 class MBTextWrapper(textwrap.TextWrapper):
1379 class MBTextWrapper(textwrap.TextWrapper):
1352 def __init__(self, **kwargs):
1380 def __init__(self, **kwargs):
1353 textwrap.TextWrapper.__init__(self, **kwargs)
1381 textwrap.TextWrapper.__init__(self, **kwargs)
1354
1382
1355 def _cutdown(self, str, space_left):
1383 def _cutdown(self, str, space_left):
1356 l = 0
1384 l = 0
1357 ucstr = unicode(str, encoding.encoding)
1385 ucstr = unicode(str, encoding.encoding)
1358 w = unicodedata.east_asian_width
1386 w = unicodedata.east_asian_width
1359 for i in xrange(len(ucstr)):
1387 for i in xrange(len(ucstr)):
1360 l += w(ucstr[i]) in 'WFA' and 2 or 1
1388 l += w(ucstr[i]) in 'WFA' and 2 or 1
1361 if space_left < l:
1389 if space_left < l:
1362 return (ucstr[:i].encode(encoding.encoding),
1390 return (ucstr[:i].encode(encoding.encoding),
1363 ucstr[i:].encode(encoding.encoding))
1391 ucstr[i:].encode(encoding.encoding))
1364 return str, ''
1392 return str, ''
1365
1393
1366 # ----------------------------------------
1394 # ----------------------------------------
1367 # overriding of base class
1395 # overriding of base class
1368
1396
1369 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1397 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1370 space_left = max(width - cur_len, 1)
1398 space_left = max(width - cur_len, 1)
1371
1399
1372 if self.break_long_words:
1400 if self.break_long_words:
1373 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1401 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1374 cur_line.append(cut)
1402 cur_line.append(cut)
1375 reversed_chunks[-1] = res
1403 reversed_chunks[-1] = res
1376 elif not cur_line:
1404 elif not cur_line:
1377 cur_line.append(reversed_chunks.pop())
1405 cur_line.append(reversed_chunks.pop())
1378
1406
1379 #### naming convention of above implementation follows 'textwrap' module
1407 #### naming convention of above implementation follows 'textwrap' module
1380
1408
1381 def wrap(line, width, initindent='', hangindent=''):
1409 def wrap(line, width, initindent='', hangindent=''):
1382 maxindent = max(len(hangindent), len(initindent))
1410 maxindent = max(len(hangindent), len(initindent))
1383 if width <= maxindent:
1411 if width <= maxindent:
1384 # adjust for weird terminal size
1412 # adjust for weird terminal size
1385 width = max(78, maxindent + 1)
1413 width = max(78, maxindent + 1)
1386 wrapper = MBTextWrapper(width=width,
1414 wrapper = MBTextWrapper(width=width,
1387 initial_indent=initindent,
1415 initial_indent=initindent,
1388 subsequent_indent=hangindent)
1416 subsequent_indent=hangindent)
1389 return wrapper.fill(line)
1417 return wrapper.fill(line)
1390
1418
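# A small sketch of wrap() with made-up help text: initindent prefixes the
# first line, hangindent the continuation lines, and MBTextWrapper keeps
# multi-byte text from being split mid-character.
from mercurial import util

text = 'a fairly long option description that needs wrapping for help output'
wrapped = util.wrap(text, width=30, initindent='  ', hangindent='      ')
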
1391 def iterlines(iterator):
1419 def iterlines(iterator):
1392 for chunk in iterator:
1420 for chunk in iterator:
1393 for line in chunk.splitlines():
1421 for line in chunk.splitlines():
1394 yield line
1422 yield line
1395
1423
1396 def expandpath(path):
1424 def expandpath(path):
1397 return os.path.expanduser(os.path.expandvars(path))
1425 return os.path.expanduser(os.path.expandvars(path))
1398
1426
1399 def hgcmd():
1427 def hgcmd():
1400 """Return the command used to execute current hg
1428 """Return the command used to execute current hg
1401
1429
1402 This is different from hgexecutable() because on Windows we want
1430 This is different from hgexecutable() because on Windows we want
1403 to avoid things opening new shell windows like batch files, so we
1431 to avoid things opening new shell windows like batch files, so we
1404 get either the python call or current executable.
1432 get either the python call or current executable.
1405 """
1433 """
1406 if main_is_frozen():
1434 if main_is_frozen():
1407 return [sys.executable]
1435 return [sys.executable]
1408 return gethgcmd()
1436 return gethgcmd()
1409
1437
1410 def rundetached(args, condfn):
1438 def rundetached(args, condfn):
1411 """Execute the argument list in a detached process.
1439 """Execute the argument list in a detached process.
1412
1440
1413 condfn is a callable which is called repeatedly and should return
1441 condfn is a callable which is called repeatedly and should return
1414 True once the child process is known to have started successfully.
1442 True once the child process is known to have started successfully.
1415 At this point, the child process PID is returned. If the child
1443 At this point, the child process PID is returned. If the child
1416 process fails to start or finishes before condfn() evaluates to
1444 process fails to start or finishes before condfn() evaluates to
1417 True, return -1.
1445 True, return -1.
1418 """
1446 """
1419 # Windows case is easier because the child process is either
1447 # Windows case is easier because the child process is either
1420 # successfully starting and validating the condition or exiting
1448 # successfully starting and validating the condition or exiting
1421 # on failure. We just poll on its PID. On Unix, if the child
1449 # on failure. We just poll on its PID. On Unix, if the child
1422 # process fails to start, it will be left in a zombie state until
1450 # process fails to start, it will be left in a zombie state until
1423 # the parent wait on it, which we cannot do since we expect a long
1451 # the parent wait on it, which we cannot do since we expect a long
1424 # running process on success. Instead we listen for SIGCHLD telling
1452 # running process on success. Instead we listen for SIGCHLD telling
1425 # us our child process terminated.
1453 # us our child process terminated.
1426 terminated = set()
1454 terminated = set()
1427 def handler(signum, frame):
1455 def handler(signum, frame):
1428 terminated.add(os.wait())
1456 terminated.add(os.wait())
1429 prevhandler = None
1457 prevhandler = None
1430 if hasattr(signal, 'SIGCHLD'):
1458 if hasattr(signal, 'SIGCHLD'):
1431 prevhandler = signal.signal(signal.SIGCHLD, handler)
1459 prevhandler = signal.signal(signal.SIGCHLD, handler)
1432 try:
1460 try:
1433 pid = spawndetached(args)
1461 pid = spawndetached(args)
1434 while not condfn():
1462 while not condfn():
1435 if ((pid in terminated or not testpid(pid))
1463 if ((pid in terminated or not testpid(pid))
1436 and not condfn()):
1464 and not condfn()):
1437 return -1
1465 return -1
1438 time.sleep(0.1)
1466 time.sleep(0.1)
1439 return pid
1467 return pid
1440 finally:
1468 finally:
1441 if prevhandler is not None:
1469 if prevhandler is not None:
1442 signal.signal(signal.SIGCHLD, prevhandler)
1470 signal.signal(signal.SIGCHLD, prevhandler)
1443
1471
1444 try:
1472 try:
1445 any, all = any, all
1473 any, all = any, all
1446 except NameError:
1474 except NameError:
1447 def any(iterable):
1475 def any(iterable):
1448 for i in iterable:
1476 for i in iterable:
1449 if i:
1477 if i:
1450 return True
1478 return True
1451 return False
1479 return False
1452
1480
1453 def all(iterable):
1481 def all(iterable):
1454 for i in iterable:
1482 for i in iterable:
1455 if not i:
1483 if not i:
1456 return False
1484 return False
1457 return True
1485 return True
1458
1486
1459 def interpolate(prefix, mapping, s, fn=None):
1487 def interpolate(prefix, mapping, s, fn=None):
1460 """Return the result of interpolating items in the mapping into string s.
1488 """Return the result of interpolating items in the mapping into string s.
1461
1489
1462 prefix is a single character string, or a two character string with
1490 prefix is a single character string, or a two character string with
1463 a backslash as the first character if the prefix needs to be escaped in
1491 a backslash as the first character if the prefix needs to be escaped in
1464 a regular expression.
1492 a regular expression.
1465
1493
1466 fn is an optional function that will be applied to the replacement text
1494 fn is an optional function that will be applied to the replacement text
1467 just before replacement.
1495 just before replacement.
1468 """
1496 """
1469 fn = fn or (lambda s: s)
1497 fn = fn or (lambda s: s)
1470 r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys())))
1498 r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys())))
1471 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1499 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1472
1500
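# Worked examples for interpolate(); the mappings are made up. A two-character
# prefix starting with a backslash is used when the prefix character is
# special in regular expressions.
from mercurial import util

assert util.interpolate('%', {'user': 'alice'}, 'hello %user') == 'hello alice'
assert util.interpolate(r'\$', {'ROOT': '/repo'}, 'cd $ROOT') == 'cd /repo'
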
1473 def getport(port):
1501 def getport(port):
1474 """Return the port for a given network service.
1502 """Return the port for a given network service.
1475
1503
1476 If port is an integer, it's returned as is. If it's a string, it's
1504 If port is an integer, it's returned as is. If it's a string, it's
1477 looked up using socket.getservbyname(). If there's no matching
1505 looked up using socket.getservbyname(). If there's no matching
1478 service, util.Abort is raised.
1506 service, util.Abort is raised.
1479 """
1507 """
1480 try:
1508 try:
1481 return int(port)
1509 return int(port)
1482 except ValueError:
1510 except ValueError:
1483 pass
1511 pass
1484
1512
1485 try:
1513 try:
1486 return socket.getservbyname(port)
1514 return socket.getservbyname(port)
1487 except socket.error:
1515 except socket.error:
1488 raise Abort(_("no port number associated with service '%s'") % port)
1516 raise Abort(_("no port number associated with service '%s'") % port)
1489
1517
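# getport() examples; the name-based lookup goes through the system services
# database, so the 'http' result may vary by platform.
from mercurial import util

assert util.getport(8000) == 8000
assert util.getport('8000') == 8000
port = util.getport('http')          # usually 80, via socket.getservbyname
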
1490 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1518 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1491 '0': False, 'no': False, 'false': False, 'off': False,
1519 '0': False, 'no': False, 'false': False, 'off': False,
1492 'never': False}
1520 'never': False}
1493
1521
1494 def parsebool(s):
1522 def parsebool(s):
1495 """Parse s into a boolean.
1523 """Parse s into a boolean.
1496
1524
1497 If s is not a valid boolean, returns None.
1525 If s is not a valid boolean, returns None.
1498 """
1526 """
1499 return _booleans.get(s.lower(), None)
1527 return _booleans.get(s.lower(), None)
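
# parsebool() accepts the usual hgrc spellings, case-insensitively, and
# returns None for anything it does not recognise.
from mercurial import util

assert util.parsebool('yes') is True
assert util.parsebool('Off') is False
assert util.parsebool('maybe') is None
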
@@ -1,20 +1,23 b''
1 # this is a hack to make sure no escape characters are inserted into the output
1 # this is a hack to make sure no escape characters are inserted into the output
2 import os
2 import os
3 if 'TERM' in os.environ:
3 if 'TERM' in os.environ:
4 del os.environ['TERM']
4 del os.environ['TERM']
5 import doctest
5 import doctest
6
6
7 import mercurial.changelog
7 import mercurial.changelog
8 doctest.testmod(mercurial.changelog)
8 doctest.testmod(mercurial.changelog)
9
9
10 import mercurial.dagparser
10 import mercurial.dagparser
11 doctest.testmod(mercurial.dagparser, optionflags=doctest.NORMALIZE_WHITESPACE)
11 doctest.testmod(mercurial.dagparser, optionflags=doctest.NORMALIZE_WHITESPACE)
12
12
13 import mercurial.match
13 import mercurial.match
14 doctest.testmod(mercurial.match)
14 doctest.testmod(mercurial.match)
15
15
16 import mercurial.url
16 import mercurial.url
17 doctest.testmod(mercurial.url)
17 doctest.testmod(mercurial.url)
18
18
19 import mercurial.util
20 doctest.testmod(mercurial.util)
21
19 import hgext.convert.cvsps
22 import hgext.convert.cvsps
20 doctest.testmod(hgext.convert.cvsps)
23 doctest.testmod(hgext.convert.cvsps)