util: quicker fspath, do not lower names when the length is different
Simon Heimberg, r9397:5b117c90 (default branch)
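The speedup is in fspath(): it resolves each path component against a cached os.listdir() of the parent directory by case-insensitive comparison, and the old loop lowered every directory entry before comparing. The new code records len(part) once and checks the length first, so entries of a different length are rejected without the per-entry lower() call. Below is a minimal standalone sketch of that comparison, assuming plain bytestring names as in the original Python 2 code; the helper name _find_stored_case is invented for illustration, and the real logic is inline in fspath() in mercurial/util.py.

def _find_stored_case(part, contents):
    '''Return the entry in contents that equals part, ignoring case.

    Sketch of the loop changed by this revision: compare lengths before
    lowering, since lower() allocates a new string for every candidate.
    '''
    lpart = part.lower()
    lenp = len(part)
    for n in contents:
        # cheap length test first; only lower() names that could match
        if lenp == len(n) and n.lower() == lpart:
            return n
    # mirrors the original fallback: cannot happen when the file exists
    return part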
@@ -1,1282 +1,1283 @@
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, random, textwrap
19 import os, stat, time, calendar, random, textwrap
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd):
41 def popen2(cmd):
42 # Setting bufsize to -1 lets the system decide the buffer size.
42 # Setting bufsize to -1 lets the system decide the buffer size.
43 # The default for bufsize is 0, meaning unbuffered. This leads to
43 # The default for bufsize is 0, meaning unbuffered. This leads to
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
46 close_fds=closefds,
46 close_fds=closefds,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
48 return p.stdin, p.stdout
48 return p.stdin, p.stdout
49 def popen3(cmd):
49 def popen3(cmd):
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
51 close_fds=closefds,
51 close_fds=closefds,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
53 stderr=subprocess.PIPE)
53 stderr=subprocess.PIPE)
54 return p.stdin, p.stdout, p.stderr
54 return p.stdin, p.stdout, p.stderr
55
55
56 def version():
56 def version():
57 """Return version information if available."""
57 """Return version information if available."""
58 try:
58 try:
59 import __version__
59 import __version__
60 return __version__.version
60 return __version__.version
61 except ImportError:
61 except ImportError:
62 return 'unknown'
62 return 'unknown'
63
63
64 # used by parsedate
64 # used by parsedate
65 defaultdateformats = (
65 defaultdateformats = (
66 '%Y-%m-%d %H:%M:%S',
66 '%Y-%m-%d %H:%M:%S',
67 '%Y-%m-%d %I:%M:%S%p',
67 '%Y-%m-%d %I:%M:%S%p',
68 '%Y-%m-%d %H:%M',
68 '%Y-%m-%d %H:%M',
69 '%Y-%m-%d %I:%M%p',
69 '%Y-%m-%d %I:%M%p',
70 '%Y-%m-%d',
70 '%Y-%m-%d',
71 '%m-%d',
71 '%m-%d',
72 '%m/%d',
72 '%m/%d',
73 '%m/%d/%y',
73 '%m/%d/%y',
74 '%m/%d/%Y',
74 '%m/%d/%Y',
75 '%a %b %d %H:%M:%S %Y',
75 '%a %b %d %H:%M:%S %Y',
76 '%a %b %d %I:%M:%S%p %Y',
76 '%a %b %d %I:%M:%S%p %Y',
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
78 '%b %d %H:%M:%S %Y',
78 '%b %d %H:%M:%S %Y',
79 '%b %d %I:%M:%S%p %Y',
79 '%b %d %I:%M:%S%p %Y',
80 '%b %d %H:%M:%S',
80 '%b %d %H:%M:%S',
81 '%b %d %I:%M:%S%p',
81 '%b %d %I:%M:%S%p',
82 '%b %d %H:%M',
82 '%b %d %H:%M',
83 '%b %d %I:%M%p',
83 '%b %d %I:%M%p',
84 '%b %d %Y',
84 '%b %d %Y',
85 '%b %d',
85 '%b %d',
86 '%H:%M:%S',
86 '%H:%M:%S',
87 '%I:%M:%S%p',
87 '%I:%M:%S%p',
88 '%H:%M',
88 '%H:%M',
89 '%I:%M%p',
89 '%I:%M%p',
90 )
90 )
91
91
92 extendeddateformats = defaultdateformats + (
92 extendeddateformats = defaultdateformats + (
93 "%Y",
93 "%Y",
94 "%Y-%m",
94 "%Y-%m",
95 "%b",
95 "%b",
96 "%b %Y",
96 "%b %Y",
97 )
97 )
98
98
99 def cachefunc(func):
99 def cachefunc(func):
100 '''cache the result of function calls'''
100 '''cache the result of function calls'''
101 # XXX doesn't handle keywords args
101 # XXX doesn't handle keywords args
102 cache = {}
102 cache = {}
103 if func.func_code.co_argcount == 1:
103 if func.func_code.co_argcount == 1:
104 # we gain a small amount of time because
104 # we gain a small amount of time because
105 # we don't need to pack/unpack the list
105 # we don't need to pack/unpack the list
106 def f(arg):
106 def f(arg):
107 if arg not in cache:
107 if arg not in cache:
108 cache[arg] = func(arg)
108 cache[arg] = func(arg)
109 return cache[arg]
109 return cache[arg]
110 else:
110 else:
111 def f(*args):
111 def f(*args):
112 if args not in cache:
112 if args not in cache:
113 cache[args] = func(*args)
113 cache[args] = func(*args)
114 return cache[args]
114 return cache[args]
115
115
116 return f
116 return f
117
117
118 def lrucachefunc(func):
118 def lrucachefunc(func):
119 '''cache most recent results of function calls'''
119 '''cache most recent results of function calls'''
120 cache = {}
120 cache = {}
121 order = []
121 order = []
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 def f(arg):
123 def f(arg):
124 if arg not in cache:
124 if arg not in cache:
125 if len(cache) > 20:
125 if len(cache) > 20:
126 del cache[order.pop(0)]
126 del cache[order.pop(0)]
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 else:
128 else:
129 order.remove(arg)
129 order.remove(arg)
130 order.append(arg)
130 order.append(arg)
131 return cache[arg]
131 return cache[arg]
132 else:
132 else:
133 def f(*args):
133 def f(*args):
134 if args not in cache:
134 if args not in cache:
135 if len(cache) > 20:
135 if len(cache) > 20:
136 del cache[order.pop(0)]
136 del cache[order.pop(0)]
137 cache[args] = func(*args)
137 cache[args] = func(*args)
138 else:
138 else:
139 order.remove(args)
139 order.remove(args)
140 order.append(args)
140 order.append(args)
141 return cache[args]
141 return cache[args]
142
142
143 return f
143 return f
144
144
145 class propertycache(object):
145 class propertycache(object):
146 def __init__(self, func):
146 def __init__(self, func):
147 self.func = func
147 self.func = func
148 self.name = func.__name__
148 self.name = func.__name__
149 def __get__(self, obj, type=None):
149 def __get__(self, obj, type=None):
150 result = self.func(obj)
150 result = self.func(obj)
151 setattr(obj, self.name, result)
151 setattr(obj, self.name, result)
152 return result
152 return result
153
153
154 def pipefilter(s, cmd):
154 def pipefilter(s, cmd):
155 '''filter string S through command CMD, returning its output'''
155 '''filter string S through command CMD, returning its output'''
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
158 pout, perr = p.communicate(s)
158 pout, perr = p.communicate(s)
159 return pout
159 return pout
160
160
161 def tempfilter(s, cmd):
161 def tempfilter(s, cmd):
162 '''filter string S through a pair of temporary files with CMD.
162 '''filter string S through a pair of temporary files with CMD.
163 CMD is used as a template to create the real command to be run,
163 CMD is used as a template to create the real command to be run,
164 with the strings INFILE and OUTFILE replaced by the real names of
164 with the strings INFILE and OUTFILE replaced by the real names of
165 the temporary files generated.'''
165 the temporary files generated.'''
166 inname, outname = None, None
166 inname, outname = None, None
167 try:
167 try:
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
169 fp = os.fdopen(infd, 'wb')
169 fp = os.fdopen(infd, 'wb')
170 fp.write(s)
170 fp.write(s)
171 fp.close()
171 fp.close()
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
173 os.close(outfd)
173 os.close(outfd)
174 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('INFILE', inname)
175 cmd = cmd.replace('OUTFILE', outname)
175 cmd = cmd.replace('OUTFILE', outname)
176 code = os.system(cmd)
176 code = os.system(cmd)
177 if sys.platform == 'OpenVMS' and code & 1:
177 if sys.platform == 'OpenVMS' and code & 1:
178 code = 0
178 code = 0
179 if code: raise Abort(_("command '%s' failed: %s") %
179 if code: raise Abort(_("command '%s' failed: %s") %
180 (cmd, explain_exit(code)))
180 (cmd, explain_exit(code)))
181 return open(outname, 'rb').read()
181 return open(outname, 'rb').read()
182 finally:
182 finally:
183 try:
183 try:
184 if inname: os.unlink(inname)
184 if inname: os.unlink(inname)
185 except: pass
185 except: pass
186 try:
186 try:
187 if outname: os.unlink(outname)
187 if outname: os.unlink(outname)
188 except: pass
188 except: pass
189
189
190 filtertable = {
190 filtertable = {
191 'tempfile:': tempfilter,
191 'tempfile:': tempfilter,
192 'pipe:': pipefilter,
192 'pipe:': pipefilter,
193 }
193 }
194
194
195 def filter(s, cmd):
195 def filter(s, cmd):
196 "filter a string through a command that transforms its input to its output"
196 "filter a string through a command that transforms its input to its output"
197 for name, fn in filtertable.iteritems():
197 for name, fn in filtertable.iteritems():
198 if cmd.startswith(name):
198 if cmd.startswith(name):
199 return fn(s, cmd[len(name):].lstrip())
199 return fn(s, cmd[len(name):].lstrip())
200 return pipefilter(s, cmd)
200 return pipefilter(s, cmd)
201
201
202 def binary(s):
202 def binary(s):
203 """return true if a string is binary data"""
203 """return true if a string is binary data"""
204 return bool(s and '\0' in s)
204 return bool(s and '\0' in s)
205
205
206 def increasingchunks(source, min=1024, max=65536):
206 def increasingchunks(source, min=1024, max=65536):
207 '''return no less than min bytes per chunk while data remains,
207 '''return no less than min bytes per chunk while data remains,
208 doubling min after each chunk until it reaches max'''
208 doubling min after each chunk until it reaches max'''
209 def log2(x):
209 def log2(x):
210 if not x:
210 if not x:
211 return 0
211 return 0
212 i = 0
212 i = 0
213 while x:
213 while x:
214 x >>= 1
214 x >>= 1
215 i += 1
215 i += 1
216 return i - 1
216 return i - 1
217
217
218 buf = []
218 buf = []
219 blen = 0
219 blen = 0
220 for chunk in source:
220 for chunk in source:
221 buf.append(chunk)
221 buf.append(chunk)
222 blen += len(chunk)
222 blen += len(chunk)
223 if blen >= min:
223 if blen >= min:
224 if min < max:
224 if min < max:
225 min = min << 1
225 min = min << 1
226 nmin = 1 << log2(blen)
226 nmin = 1 << log2(blen)
227 if nmin > min:
227 if nmin > min:
228 min = nmin
228 min = nmin
229 if min > max:
229 if min > max:
230 min = max
230 min = max
231 yield ''.join(buf)
231 yield ''.join(buf)
232 blen = 0
232 blen = 0
233 buf = []
233 buf = []
234 if buf:
234 if buf:
235 yield ''.join(buf)
235 yield ''.join(buf)
236
236
237 Abort = error.Abort
237 Abort = error.Abort
238
238
239 def always(fn): return True
239 def always(fn): return True
240 def never(fn): return False
240 def never(fn): return False
241
241
242 def pathto(root, n1, n2):
242 def pathto(root, n1, n2):
243 '''return the relative path from one place to another.
243 '''return the relative path from one place to another.
244 root should use os.sep to separate directories
244 root should use os.sep to separate directories
245 n1 should use os.sep to separate directories
245 n1 should use os.sep to separate directories
246 n2 should use "/" to separate directories
246 n2 should use "/" to separate directories
247 returns an os.sep-separated path.
247 returns an os.sep-separated path.
248
248
249 If n1 is a relative path, it's assumed it's
249 If n1 is a relative path, it's assumed it's
250 relative to root.
250 relative to root.
251 n2 should always be relative to root.
251 n2 should always be relative to root.
252 '''
252 '''
253 if not n1: return localpath(n2)
253 if not n1: return localpath(n2)
254 if os.path.isabs(n1):
254 if os.path.isabs(n1):
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
256 return os.path.join(root, localpath(n2))
256 return os.path.join(root, localpath(n2))
257 n2 = '/'.join((pconvert(root), n2))
257 n2 = '/'.join((pconvert(root), n2))
258 a, b = splitpath(n1), n2.split('/')
258 a, b = splitpath(n1), n2.split('/')
259 a.reverse()
259 a.reverse()
260 b.reverse()
260 b.reverse()
261 while a and b and a[-1] == b[-1]:
261 while a and b and a[-1] == b[-1]:
262 a.pop()
262 a.pop()
263 b.pop()
263 b.pop()
264 b.reverse()
264 b.reverse()
265 return os.sep.join((['..'] * len(a)) + b) or '.'
265 return os.sep.join((['..'] * len(a)) + b) or '.'
266
266
267 def canonpath(root, cwd, myname):
267 def canonpath(root, cwd, myname):
268 """return the canonical path of myname, given cwd and root"""
268 """return the canonical path of myname, given cwd and root"""
269 if endswithsep(root):
269 if endswithsep(root):
270 rootsep = root
270 rootsep = root
271 else:
271 else:
272 rootsep = root + os.sep
272 rootsep = root + os.sep
273 name = myname
273 name = myname
274 if not os.path.isabs(name):
274 if not os.path.isabs(name):
275 name = os.path.join(root, cwd, name)
275 name = os.path.join(root, cwd, name)
276 name = os.path.normpath(name)
276 name = os.path.normpath(name)
277 audit_path = path_auditor(root)
277 audit_path = path_auditor(root)
278 if name != rootsep and name.startswith(rootsep):
278 if name != rootsep and name.startswith(rootsep):
279 name = name[len(rootsep):]
279 name = name[len(rootsep):]
280 audit_path(name)
280 audit_path(name)
281 return pconvert(name)
281 return pconvert(name)
282 elif name == root:
282 elif name == root:
283 return ''
283 return ''
284 else:
284 else:
285 # Determine whether `name' is in the hierarchy at or beneath `root',
285 # Determine whether `name' is in the hierarchy at or beneath `root',
286 # by iterating name=dirname(name) until that causes no change (can't
286 # by iterating name=dirname(name) until that causes no change (can't
287 # check name == '/', because that doesn't work on windows). For each
287 # check name == '/', because that doesn't work on windows). For each
288 # `name', compare dev/inode numbers. If they match, the list `rel'
288 # `name', compare dev/inode numbers. If they match, the list `rel'
289 # holds the reversed list of components making up the relative file
289 # holds the reversed list of components making up the relative file
290 # name we want.
290 # name we want.
291 root_st = os.stat(root)
291 root_st = os.stat(root)
292 rel = []
292 rel = []
293 while True:
293 while True:
294 try:
294 try:
295 name_st = os.stat(name)
295 name_st = os.stat(name)
296 except OSError:
296 except OSError:
297 break
297 break
298 if samestat(name_st, root_st):
298 if samestat(name_st, root_st):
299 if not rel:
299 if not rel:
300 # name was actually the same as root (maybe a symlink)
300 # name was actually the same as root (maybe a symlink)
301 return ''
301 return ''
302 rel.reverse()
302 rel.reverse()
303 name = os.path.join(*rel)
303 name = os.path.join(*rel)
304 audit_path(name)
304 audit_path(name)
305 return pconvert(name)
305 return pconvert(name)
306 dirname, basename = os.path.split(name)
306 dirname, basename = os.path.split(name)
307 rel.append(basename)
307 rel.append(basename)
308 if dirname == name:
308 if dirname == name:
309 break
309 break
310 name = dirname
310 name = dirname
311
311
312 raise Abort('%s not under root' % myname)
312 raise Abort('%s not under root' % myname)
313
313
314 _hgexecutable = None
314 _hgexecutable = None
315
315
316 def main_is_frozen():
316 def main_is_frozen():
317 """return True if we are a frozen executable.
317 """return True if we are a frozen executable.
318
318
319 The code supports py2exe (most common, Windows only) and tools/freeze
319 The code supports py2exe (most common, Windows only) and tools/freeze
320 (portable, not much used).
320 (portable, not much used).
321 """
321 """
322 return (hasattr(sys, "frozen") or # new py2exe
322 return (hasattr(sys, "frozen") or # new py2exe
323 hasattr(sys, "importers") or # old py2exe
323 hasattr(sys, "importers") or # old py2exe
324 imp.is_frozen("__main__")) # tools/freeze
324 imp.is_frozen("__main__")) # tools/freeze
325
325
326 def hgexecutable():
326 def hgexecutable():
327 """return location of the 'hg' executable.
327 """return location of the 'hg' executable.
328
328
329 Defaults to $HG or 'hg' in the search path.
329 Defaults to $HG or 'hg' in the search path.
330 """
330 """
331 if _hgexecutable is None:
331 if _hgexecutable is None:
332 hg = os.environ.get('HG')
332 hg = os.environ.get('HG')
333 if hg:
333 if hg:
334 set_hgexecutable(hg)
334 set_hgexecutable(hg)
335 elif main_is_frozen():
335 elif main_is_frozen():
336 set_hgexecutable(sys.executable)
336 set_hgexecutable(sys.executable)
337 else:
337 else:
338 set_hgexecutable(find_exe('hg') or 'hg')
338 set_hgexecutable(find_exe('hg') or 'hg')
339 return _hgexecutable
339 return _hgexecutable
340
340
341 def set_hgexecutable(path):
341 def set_hgexecutable(path):
342 """set location of the 'hg' executable"""
342 """set location of the 'hg' executable"""
343 global _hgexecutable
343 global _hgexecutable
344 _hgexecutable = path
344 _hgexecutable = path
345
345
346 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
346 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
347 '''enhanced shell command execution.
347 '''enhanced shell command execution.
348 run with environment maybe modified, maybe in different dir.
348 run with environment maybe modified, maybe in different dir.
349
349
350 if command fails and onerr is None, return status. if ui object,
350 if command fails and onerr is None, return status. if ui object,
351 print error message and return status, else raise onerr object as
351 print error message and return status, else raise onerr object as
352 exception.'''
352 exception.'''
353 def py2shell(val):
353 def py2shell(val):
354 'convert python object into string that is useful to shell'
354 'convert python object into string that is useful to shell'
355 if val is None or val is False:
355 if val is None or val is False:
356 return '0'
356 return '0'
357 if val is True:
357 if val is True:
358 return '1'
358 return '1'
359 return str(val)
359 return str(val)
360 oldenv = {}
360 oldenv = {}
361 for k in environ:
361 for k in environ:
362 oldenv[k] = os.environ.get(k)
362 oldenv[k] = os.environ.get(k)
363 if cwd is not None:
363 if cwd is not None:
364 oldcwd = os.getcwd()
364 oldcwd = os.getcwd()
365 origcmd = cmd
365 origcmd = cmd
366 if os.name == 'nt':
366 if os.name == 'nt':
367 cmd = '"%s"' % cmd
367 cmd = '"%s"' % cmd
368 try:
368 try:
369 for k, v in environ.iteritems():
369 for k, v in environ.iteritems():
370 os.environ[k] = py2shell(v)
370 os.environ[k] = py2shell(v)
371 os.environ['HG'] = hgexecutable()
371 os.environ['HG'] = hgexecutable()
372 if cwd is not None and oldcwd != cwd:
372 if cwd is not None and oldcwd != cwd:
373 os.chdir(cwd)
373 os.chdir(cwd)
374 rc = os.system(cmd)
374 rc = os.system(cmd)
375 if sys.platform == 'OpenVMS' and rc & 1:
375 if sys.platform == 'OpenVMS' and rc & 1:
376 rc = 0
376 rc = 0
377 if rc and onerr:
377 if rc and onerr:
378 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
378 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
379 explain_exit(rc)[0])
379 explain_exit(rc)[0])
380 if errprefix:
380 if errprefix:
381 errmsg = '%s: %s' % (errprefix, errmsg)
381 errmsg = '%s: %s' % (errprefix, errmsg)
382 try:
382 try:
383 onerr.warn(errmsg + '\n')
383 onerr.warn(errmsg + '\n')
384 except AttributeError:
384 except AttributeError:
385 raise onerr(errmsg)
385 raise onerr(errmsg)
386 return rc
386 return rc
387 finally:
387 finally:
388 for k, v in oldenv.iteritems():
388 for k, v in oldenv.iteritems():
389 if v is None:
389 if v is None:
390 del os.environ[k]
390 del os.environ[k]
391 else:
391 else:
392 os.environ[k] = v
392 os.environ[k] = v
393 if cwd is not None and oldcwd != cwd:
393 if cwd is not None and oldcwd != cwd:
394 os.chdir(oldcwd)
394 os.chdir(oldcwd)
395
395
396 def checksignature(func):
396 def checksignature(func):
397 '''wrap a function with code to check for calling errors'''
397 '''wrap a function with code to check for calling errors'''
398 def check(*args, **kwargs):
398 def check(*args, **kwargs):
399 try:
399 try:
400 return func(*args, **kwargs)
400 return func(*args, **kwargs)
401 except TypeError:
401 except TypeError:
402 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
402 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
403 raise error.SignatureError
403 raise error.SignatureError
404 raise
404 raise
405
405
406 return check
406 return check
407
407
408 # os.path.lexists is not available on python2.3
408 # os.path.lexists is not available on python2.3
409 def lexists(filename):
409 def lexists(filename):
410 "test whether a file with this name exists. does not follow symlinks"
410 "test whether a file with this name exists. does not follow symlinks"
411 try:
411 try:
412 os.lstat(filename)
412 os.lstat(filename)
413 except:
413 except:
414 return False
414 return False
415 return True
415 return True
416
416
417 def rename(src, dst):
417 def rename(src, dst):
418 """forcibly rename a file"""
418 """forcibly rename a file"""
419 try:
419 try:
420 os.rename(src, dst)
420 os.rename(src, dst)
421 except OSError, err: # FIXME: check err (EEXIST ?)
421 except OSError, err: # FIXME: check err (EEXIST ?)
422
422
423 # On windows, rename to existing file is not allowed, so we
423 # On windows, rename to existing file is not allowed, so we
424 # must delete destination first. But if a file is open, unlink
424 # must delete destination first. But if a file is open, unlink
425 # schedules it for delete but does not delete it. Rename
425 # schedules it for delete but does not delete it. Rename
426 # happens immediately even for open files, so we rename
426 # happens immediately even for open files, so we rename
427 # destination to a temporary name, then delete that. Then
427 # destination to a temporary name, then delete that. Then
428 # rename is safe to do.
428 # rename is safe to do.
429 # The temporary name is chosen at random to avoid the situation
429 # The temporary name is chosen at random to avoid the situation
430 # where a file is left lying around from a previous aborted run.
430 # where a file is left lying around from a previous aborted run.
431 # The usual race condition this introduces can't be avoided as
431 # The usual race condition this introduces can't be avoided as
432 # we need the name to rename into, and not the file itself. Due
432 # we need the name to rename into, and not the file itself. Due
433 # to the nature of the operation however, any races will at worst
433 # to the nature of the operation however, any races will at worst
434 # lead to the rename failing and the current operation aborting.
434 # lead to the rename failing and the current operation aborting.
435
435
436 def tempname(prefix):
436 def tempname(prefix):
437 for tries in xrange(10):
437 for tries in xrange(10):
438 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
438 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
439 if not os.path.exists(temp):
439 if not os.path.exists(temp):
440 return temp
440 return temp
441 raise IOError, (errno.EEXIST, "No usable temporary filename found")
441 raise IOError, (errno.EEXIST, "No usable temporary filename found")
442
442
443 temp = tempname(dst)
443 temp = tempname(dst)
444 os.rename(dst, temp)
444 os.rename(dst, temp)
445 os.unlink(temp)
445 os.unlink(temp)
446 os.rename(src, dst)
446 os.rename(src, dst)
447
447
448 def unlink(f):
448 def unlink(f):
449 """unlink and remove the directory if it is empty"""
449 """unlink and remove the directory if it is empty"""
450 os.unlink(f)
450 os.unlink(f)
451 # try removing directories that might now be empty
451 # try removing directories that might now be empty
452 try:
452 try:
453 os.removedirs(os.path.dirname(f))
453 os.removedirs(os.path.dirname(f))
454 except OSError:
454 except OSError:
455 pass
455 pass
456
456
457 def copyfile(src, dest):
457 def copyfile(src, dest):
458 "copy a file, preserving mode and atime/mtime"
458 "copy a file, preserving mode and atime/mtime"
459 if os.path.islink(src):
459 if os.path.islink(src):
460 try:
460 try:
461 os.unlink(dest)
461 os.unlink(dest)
462 except:
462 except:
463 pass
463 pass
464 os.symlink(os.readlink(src), dest)
464 os.symlink(os.readlink(src), dest)
465 else:
465 else:
466 try:
466 try:
467 shutil.copyfile(src, dest)
467 shutil.copyfile(src, dest)
468 shutil.copystat(src, dest)
468 shutil.copystat(src, dest)
469 except shutil.Error, inst:
469 except shutil.Error, inst:
470 raise Abort(str(inst))
470 raise Abort(str(inst))
471
471
472 def copyfiles(src, dst, hardlink=None):
472 def copyfiles(src, dst, hardlink=None):
473 """Copy a directory tree using hardlinks if possible"""
473 """Copy a directory tree using hardlinks if possible"""
474
474
475 if hardlink is None:
475 if hardlink is None:
476 hardlink = (os.stat(src).st_dev ==
476 hardlink = (os.stat(src).st_dev ==
477 os.stat(os.path.dirname(dst)).st_dev)
477 os.stat(os.path.dirname(dst)).st_dev)
478
478
479 if os.path.isdir(src):
479 if os.path.isdir(src):
480 os.mkdir(dst)
480 os.mkdir(dst)
481 for name, kind in osutil.listdir(src):
481 for name, kind in osutil.listdir(src):
482 srcname = os.path.join(src, name)
482 srcname = os.path.join(src, name)
483 dstname = os.path.join(dst, name)
483 dstname = os.path.join(dst, name)
484 copyfiles(srcname, dstname, hardlink)
484 copyfiles(srcname, dstname, hardlink)
485 else:
485 else:
486 if hardlink:
486 if hardlink:
487 try:
487 try:
488 os_link(src, dst)
488 os_link(src, dst)
489 except (IOError, OSError):
489 except (IOError, OSError):
490 hardlink = False
490 hardlink = False
491 shutil.copy(src, dst)
491 shutil.copy(src, dst)
492 else:
492 else:
493 shutil.copy(src, dst)
493 shutil.copy(src, dst)
494
494
495 class path_auditor(object):
495 class path_auditor(object):
496 '''ensure that a filesystem path contains no banned components.
496 '''ensure that a filesystem path contains no banned components.
497 the following properties of a path are checked:
497 the following properties of a path are checked:
498
498
499 - under top-level .hg
499 - under top-level .hg
500 - starts at the root of a windows drive
500 - starts at the root of a windows drive
501 - contains ".."
501 - contains ".."
502 - traverses a symlink (e.g. a/symlink_here/b)
502 - traverses a symlink (e.g. a/symlink_here/b)
503 - inside a nested repository'''
503 - inside a nested repository'''
504
504
505 def __init__(self, root):
505 def __init__(self, root):
506 self.audited = set()
506 self.audited = set()
507 self.auditeddir = set()
507 self.auditeddir = set()
508 self.root = root
508 self.root = root
509
509
510 def __call__(self, path):
510 def __call__(self, path):
511 if path in self.audited:
511 if path in self.audited:
512 return
512 return
513 normpath = os.path.normcase(path)
513 normpath = os.path.normcase(path)
514 parts = splitpath(normpath)
514 parts = splitpath(normpath)
515 if (os.path.splitdrive(path)[0]
515 if (os.path.splitdrive(path)[0]
516 or parts[0].lower() in ('.hg', '.hg.', '')
516 or parts[0].lower() in ('.hg', '.hg.', '')
517 or os.pardir in parts):
517 or os.pardir in parts):
518 raise Abort(_("path contains illegal component: %s") % path)
518 raise Abort(_("path contains illegal component: %s") % path)
519 if '.hg' in path.lower():
519 if '.hg' in path.lower():
520 lparts = [p.lower() for p in parts]
520 lparts = [p.lower() for p in parts]
521 for p in '.hg', '.hg.':
521 for p in '.hg', '.hg.':
522 if p in lparts[1:]:
522 if p in lparts[1:]:
523 pos = lparts.index(p)
523 pos = lparts.index(p)
524 base = os.path.join(*parts[:pos])
524 base = os.path.join(*parts[:pos])
525 raise Abort(_('path %r is inside repo %r') % (path, base))
525 raise Abort(_('path %r is inside repo %r') % (path, base))
526 def check(prefix):
526 def check(prefix):
527 curpath = os.path.join(self.root, prefix)
527 curpath = os.path.join(self.root, prefix)
528 try:
528 try:
529 st = os.lstat(curpath)
529 st = os.lstat(curpath)
530 except OSError, err:
530 except OSError, err:
531 # EINVAL can be raised as invalid path syntax under win32.
531 # EINVAL can be raised as invalid path syntax under win32.
532 # They must be ignored for patterns can be checked too.
532 # They must be ignored for patterns can be checked too.
533 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
533 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
534 raise
534 raise
535 else:
535 else:
536 if stat.S_ISLNK(st.st_mode):
536 if stat.S_ISLNK(st.st_mode):
537 raise Abort(_('path %r traverses symbolic link %r') %
537 raise Abort(_('path %r traverses symbolic link %r') %
538 (path, prefix))
538 (path, prefix))
539 elif (stat.S_ISDIR(st.st_mode) and
539 elif (stat.S_ISDIR(st.st_mode) and
540 os.path.isdir(os.path.join(curpath, '.hg'))):
540 os.path.isdir(os.path.join(curpath, '.hg'))):
541 raise Abort(_('path %r is inside repo %r') %
541 raise Abort(_('path %r is inside repo %r') %
542 (path, prefix))
542 (path, prefix))
543 parts.pop()
543 parts.pop()
544 prefixes = []
544 prefixes = []
545 while parts:
545 while parts:
546 prefix = os.sep.join(parts)
546 prefix = os.sep.join(parts)
547 if prefix in self.auditeddir:
547 if prefix in self.auditeddir:
548 break
548 break
549 check(prefix)
549 check(prefix)
550 prefixes.append(prefix)
550 prefixes.append(prefix)
551 parts.pop()
551 parts.pop()
552
552
553 self.audited.add(path)
553 self.audited.add(path)
554 # only add prefixes to the cache after checking everything: we don't
554 # only add prefixes to the cache after checking everything: we don't
555 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
555 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
556 self.auditeddir.update(prefixes)
556 self.auditeddir.update(prefixes)
557
557
558 def nlinks(pathname):
558 def nlinks(pathname):
559 """Return number of hardlinks for the given file."""
559 """Return number of hardlinks for the given file."""
560 return os.lstat(pathname).st_nlink
560 return os.lstat(pathname).st_nlink
561
561
562 if hasattr(os, 'link'):
562 if hasattr(os, 'link'):
563 os_link = os.link
563 os_link = os.link
564 else:
564 else:
565 def os_link(src, dst):
565 def os_link(src, dst):
566 raise OSError(0, _("Hardlinks not supported"))
566 raise OSError(0, _("Hardlinks not supported"))
567
567
568 def lookup_reg(key, name=None, scope=None):
568 def lookup_reg(key, name=None, scope=None):
569 return None
569 return None
570
570
571 if os.name == 'nt':
571 if os.name == 'nt':
572 from windows import *
572 from windows import *
573 else:
573 else:
574 from posix import *
574 from posix import *
575
575
576 def makelock(info, pathname):
576 def makelock(info, pathname):
577 try:
577 try:
578 return os.symlink(info, pathname)
578 return os.symlink(info, pathname)
579 except OSError, why:
579 except OSError, why:
580 if why.errno == errno.EEXIST:
580 if why.errno == errno.EEXIST:
581 raise
581 raise
582 except AttributeError: # no symlink in os
582 except AttributeError: # no symlink in os
583 pass
583 pass
584
584
585 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
585 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
586 os.write(ld, info)
586 os.write(ld, info)
587 os.close(ld)
587 os.close(ld)
588
588
589 def readlock(pathname):
589 def readlock(pathname):
590 try:
590 try:
591 return os.readlink(pathname)
591 return os.readlink(pathname)
592 except OSError, why:
592 except OSError, why:
593 if why.errno not in (errno.EINVAL, errno.ENOSYS):
593 if why.errno not in (errno.EINVAL, errno.ENOSYS):
594 raise
594 raise
595 except AttributeError: # no symlink in os
595 except AttributeError: # no symlink in os
596 pass
596 pass
597 return posixfile(pathname).read()
597 return posixfile(pathname).read()
598
598
599 def fstat(fp):
599 def fstat(fp):
600 '''stat file object that may not have fileno method.'''
600 '''stat file object that may not have fileno method.'''
601 try:
601 try:
602 return os.fstat(fp.fileno())
602 return os.fstat(fp.fileno())
603 except AttributeError:
603 except AttributeError:
604 return os.stat(fp.name)
604 return os.stat(fp.name)
605
605
606 # File system features
606 # File system features
607
607
608 def checkcase(path):
608 def checkcase(path):
609 """
609 """
610 Check whether the given path is on a case-sensitive filesystem
610 Check whether the given path is on a case-sensitive filesystem
611
611
612 Requires a path (like /foo/.hg) ending with a foldable final
612 Requires a path (like /foo/.hg) ending with a foldable final
613 directory component.
613 directory component.
614 """
614 """
615 s1 = os.stat(path)
615 s1 = os.stat(path)
616 d, b = os.path.split(path)
616 d, b = os.path.split(path)
617 p2 = os.path.join(d, b.upper())
617 p2 = os.path.join(d, b.upper())
618 if path == p2:
618 if path == p2:
619 p2 = os.path.join(d, b.lower())
619 p2 = os.path.join(d, b.lower())
620 try:
620 try:
621 s2 = os.stat(p2)
621 s2 = os.stat(p2)
622 if s2 == s1:
622 if s2 == s1:
623 return False
623 return False
624 return True
624 return True
625 except:
625 except:
626 return True
626 return True
627
627
628 _fspathcache = {}
628 _fspathcache = {}
629 def fspath(name, root):
629 def fspath(name, root):
630 '''Get name in the case stored in the filesystem
630 '''Get name in the case stored in the filesystem
631
631
632 The name is either relative to root, or it is an absolute path starting
632 The name is either relative to root, or it is an absolute path starting
633 with root. Note that this function is unnecessary, and should not be
633 with root. Note that this function is unnecessary, and should not be
634 called, for case-sensitive filesystems (simply because it's expensive).
634 called, for case-sensitive filesystems (simply because it's expensive).
635 '''
635 '''
636 # If name is absolute, make it relative
636 # If name is absolute, make it relative
637 if name.lower().startswith(root.lower()):
637 if name.lower().startswith(root.lower()):
638 l = len(root)
638 l = len(root)
639 if name[l] == os.sep or name[l] == os.altsep:
639 if name[l] == os.sep or name[l] == os.altsep:
640 l = l + 1
640 l = l + 1
641 name = name[l:]
641 name = name[l:]
642
642
643 if not os.path.exists(os.path.join(root, name)):
643 if not os.path.exists(os.path.join(root, name)):
644 return None
644 return None
645
645
646 seps = os.sep
646 seps = os.sep
647 if os.altsep:
647 if os.altsep:
648 seps = seps + os.altsep
648 seps = seps + os.altsep
649 # Protect backslashes. This gets silly very quickly.
649 # Protect backslashes. This gets silly very quickly.
650 seps.replace('\\','\\\\')
650 seps.replace('\\','\\\\')
651 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
651 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
652 dir = os.path.normcase(os.path.normpath(root))
652 dir = os.path.normcase(os.path.normpath(root))
653 result = []
653 result = []
654 for part, sep in pattern.findall(name):
654 for part, sep in pattern.findall(name):
655 if sep:
655 if sep:
656 result.append(sep)
656 result.append(sep)
657 continue
657 continue
658
658
659 if dir not in _fspathcache:
659 if dir not in _fspathcache:
660 _fspathcache[dir] = os.listdir(dir)
660 _fspathcache[dir] = os.listdir(dir)
661 contents = _fspathcache[dir]
661 contents = _fspathcache[dir]
662
662
663 663 lpart = part.lower()
    664 lenp = len(part)
664 665 for n in contents:
665     -   if n.lower() == lpart:
    666 +   if lenp == len(n) and n.lower() == lpart:
666 667 result.append(n)
667 668 break
668 669 else:
669 670 # Cannot happen, as the file exists!
670 671 result.append(part)
671 672 dir = os.path.join(dir, lpart)
672
673
673 return ''.join(result)
674 return ''.join(result)
674
675
675 def checkexec(path):
676 def checkexec(path):
676 """
677 """
677 Check whether the given path is on a filesystem with UNIX-like exec flags
678 Check whether the given path is on a filesystem with UNIX-like exec flags
678
679
679 Requires a directory (like /foo/.hg)
680 Requires a directory (like /foo/.hg)
680 """
681 """
681
682
682 # VFAT on some Linux versions can flip mode but it doesn't persist
683 # VFAT on some Linux versions can flip mode but it doesn't persist
683 # a FS remount. Frequently we can detect it if files are created
684 # a FS remount. Frequently we can detect it if files are created
684 # with exec bit on.
685 # with exec bit on.
685
686
686 try:
687 try:
687 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
688 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
688 fh, fn = tempfile.mkstemp("", "", path)
689 fh, fn = tempfile.mkstemp("", "", path)
689 try:
690 try:
690 os.close(fh)
691 os.close(fh)
691 m = os.stat(fn).st_mode & 0777
692 m = os.stat(fn).st_mode & 0777
692 new_file_has_exec = m & EXECFLAGS
693 new_file_has_exec = m & EXECFLAGS
693 os.chmod(fn, m ^ EXECFLAGS)
694 os.chmod(fn, m ^ EXECFLAGS)
694 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
695 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
695 finally:
696 finally:
696 os.unlink(fn)
697 os.unlink(fn)
697 except (IOError, OSError):
698 except (IOError, OSError):
698 # we don't care, the user probably won't be able to commit anyway
699 # we don't care, the user probably won't be able to commit anyway
699 return False
700 return False
700 return not (new_file_has_exec or exec_flags_cannot_flip)
701 return not (new_file_has_exec or exec_flags_cannot_flip)
701
702
702 def checklink(path):
703 def checklink(path):
703 """check whether the given path is on a symlink-capable filesystem"""
704 """check whether the given path is on a symlink-capable filesystem"""
704 # mktemp is not racy because symlink creation will fail if the
705 # mktemp is not racy because symlink creation will fail if the
705 # file already exists
706 # file already exists
706 name = tempfile.mktemp(dir=path)
707 name = tempfile.mktemp(dir=path)
707 try:
708 try:
708 os.symlink(".", name)
709 os.symlink(".", name)
709 os.unlink(name)
710 os.unlink(name)
710 return True
711 return True
711 except (OSError, AttributeError):
712 except (OSError, AttributeError):
712 return False
713 return False
713
714
714 def needbinarypatch():
715 def needbinarypatch():
715 """return True if patches should be applied in binary mode by default."""
716 """return True if patches should be applied in binary mode by default."""
716 return os.name == 'nt'
717 return os.name == 'nt'
717
718
718 def endswithsep(path):
719 def endswithsep(path):
719 '''Check path ends with os.sep or os.altsep.'''
720 '''Check path ends with os.sep or os.altsep.'''
720 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
721 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
721
722
722 def splitpath(path):
723 def splitpath(path):
723 '''Split path by os.sep.
724 '''Split path by os.sep.
724 Note that this function does not use os.altsep because this is
725 Note that this function does not use os.altsep because this is
725 an alternative of simple "xxx.split(os.sep)".
726 an alternative of simple "xxx.split(os.sep)".
726 It is recommended to use os.path.normpath() before using this
727 It is recommended to use os.path.normpath() before using this
727 function if need.'''
728 function if need.'''
728 return path.split(os.sep)
729 return path.split(os.sep)
729
730
730 def gui():
731 def gui():
731 '''Are we running in a GUI?'''
732 '''Are we running in a GUI?'''
732 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
733 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
733
734
734 def mktempcopy(name, emptyok=False, createmode=None):
735 def mktempcopy(name, emptyok=False, createmode=None):
735 """Create a temporary file with the same contents from name
736 """Create a temporary file with the same contents from name
736
737
737 The permission bits are copied from the original file.
738 The permission bits are copied from the original file.
738
739
739 If the temporary file is going to be truncated immediately, you
740 If the temporary file is going to be truncated immediately, you
740 can use emptyok=True as an optimization.
741 can use emptyok=True as an optimization.
741
742
742 Returns the name of the temporary file.
743 Returns the name of the temporary file.
743 """
744 """
744 d, fn = os.path.split(name)
745 d, fn = os.path.split(name)
745 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
746 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
746 os.close(fd)
747 os.close(fd)
747 # Temporary files are created with mode 0600, which is usually not
748 # Temporary files are created with mode 0600, which is usually not
748 # what we want. If the original file already exists, just copy
749 # what we want. If the original file already exists, just copy
749 # its mode. Otherwise, manually obey umask.
750 # its mode. Otherwise, manually obey umask.
750 try:
751 try:
751 st_mode = os.lstat(name).st_mode & 0777
752 st_mode = os.lstat(name).st_mode & 0777
752 except OSError, inst:
753 except OSError, inst:
753 if inst.errno != errno.ENOENT:
754 if inst.errno != errno.ENOENT:
754 raise
755 raise
755 st_mode = createmode
756 st_mode = createmode
756 if st_mode is None:
757 if st_mode is None:
757 st_mode = ~umask
758 st_mode = ~umask
758 st_mode &= 0666
759 st_mode &= 0666
759 os.chmod(temp, st_mode)
760 os.chmod(temp, st_mode)
760 if emptyok:
761 if emptyok:
761 return temp
762 return temp
762 try:
763 try:
763 try:
764 try:
764 ifp = posixfile(name, "rb")
765 ifp = posixfile(name, "rb")
765 except IOError, inst:
766 except IOError, inst:
766 if inst.errno == errno.ENOENT:
767 if inst.errno == errno.ENOENT:
767 return temp
768 return temp
768 if not getattr(inst, 'filename', None):
769 if not getattr(inst, 'filename', None):
769 inst.filename = name
770 inst.filename = name
770 raise
771 raise
771 ofp = posixfile(temp, "wb")
772 ofp = posixfile(temp, "wb")
772 for chunk in filechunkiter(ifp):
773 for chunk in filechunkiter(ifp):
773 ofp.write(chunk)
774 ofp.write(chunk)
774 ifp.close()
775 ifp.close()
775 ofp.close()
776 ofp.close()
776 except:
777 except:
777 try: os.unlink(temp)
778 try: os.unlink(temp)
778 except: pass
779 except: pass
779 raise
780 raise
780 return temp
781 return temp
781
782
782 class atomictempfile(object):
783 class atomictempfile(object):
783 """file-like object that atomically updates a file
784 """file-like object that atomically updates a file
784
785
785 All writes will be redirected to a temporary copy of the original
786 All writes will be redirected to a temporary copy of the original
786 file. When rename is called, the copy is renamed to the original
787 file. When rename is called, the copy is renamed to the original
787 name, making the changes visible.
788 name, making the changes visible.
788 """
789 """
789 def __init__(self, name, mode, createmode):
790 def __init__(self, name, mode, createmode):
790 self.__name = name
791 self.__name = name
791 self._fp = None
792 self._fp = None
792 self.temp = mktempcopy(name, emptyok=('w' in mode),
793 self.temp = mktempcopy(name, emptyok=('w' in mode),
793 createmode=createmode)
794 createmode=createmode)
794 self._fp = posixfile(self.temp, mode)
795 self._fp = posixfile(self.temp, mode)
795
796
796 def __getattr__(self, name):
797 def __getattr__(self, name):
797 return getattr(self._fp, name)
798 return getattr(self._fp, name)
798
799
799 def rename(self):
800 def rename(self):
800 if not self._fp.closed:
801 if not self._fp.closed:
801 self._fp.close()
802 self._fp.close()
802 rename(self.temp, localpath(self.__name))
803 rename(self.temp, localpath(self.__name))
803
804
804 def __del__(self):
805 def __del__(self):
805 if not self._fp:
806 if not self._fp:
806 return
807 return
807 if not self._fp.closed:
808 if not self._fp.closed:
808 try:
809 try:
809 os.unlink(self.temp)
810 os.unlink(self.temp)
810 except: pass
811 except: pass
811 self._fp.close()
812 self._fp.close()
812
813
813 def makedirs(name, mode=None):
814 def makedirs(name, mode=None):
814 """recursive directory creation with parent mode inheritance"""
815 """recursive directory creation with parent mode inheritance"""
815 try:
816 try:
816 os.mkdir(name)
817 os.mkdir(name)
817 if mode is not None:
818 if mode is not None:
818 os.chmod(name, mode)
819 os.chmod(name, mode)
819 return
820 return
820 except OSError, err:
821 except OSError, err:
821 if err.errno == errno.EEXIST:
822 if err.errno == errno.EEXIST:
822 return
823 return
823 if err.errno != errno.ENOENT:
824 if err.errno != errno.ENOENT:
824 raise
825 raise
825 parent = os.path.abspath(os.path.dirname(name))
826 parent = os.path.abspath(os.path.dirname(name))
826 makedirs(parent, mode)
827 makedirs(parent, mode)
827 makedirs(name, mode)
828 makedirs(name, mode)
828
829
829 class opener(object):
830 class opener(object):
830 """Open files relative to a base directory
831 """Open files relative to a base directory
831
832
832 This class is used to hide the details of COW semantics and
833 This class is used to hide the details of COW semantics and
833 remote file access from higher level code.
834 remote file access from higher level code.
834 """
835 """
835 def __init__(self, base, audit=True):
836 def __init__(self, base, audit=True):
836 self.base = base
837 self.base = base
837 if audit:
838 if audit:
838 self.audit_path = path_auditor(base)
839 self.audit_path = path_auditor(base)
839 else:
840 else:
840 self.audit_path = always
841 self.audit_path = always
841 self.createmode = None
842 self.createmode = None
842
843
843 @propertycache
844 @propertycache
844 def _can_symlink(self):
845 def _can_symlink(self):
845 return checklink(self.base)
846 return checklink(self.base)
846
847
847 def _fixfilemode(self, name):
848 def _fixfilemode(self, name):
848 if self.createmode is None:
849 if self.createmode is None:
849 return
850 return
850 os.chmod(name, self.createmode & 0666)
851 os.chmod(name, self.createmode & 0666)
851
852
852 def __call__(self, path, mode="r", text=False, atomictemp=False):
853 def __call__(self, path, mode="r", text=False, atomictemp=False):
853 self.audit_path(path)
854 self.audit_path(path)
854 f = os.path.join(self.base, path)
855 f = os.path.join(self.base, path)
855
856
856 if not text and "b" not in mode:
857 if not text and "b" not in mode:
857 mode += "b" # for that other OS
858 mode += "b" # for that other OS
858
859
859 nlink = -1
860 nlink = -1
860 if mode not in ("r", "rb"):
861 if mode not in ("r", "rb"):
861 try:
862 try:
862 nlink = nlinks(f)
863 nlink = nlinks(f)
863 except OSError:
864 except OSError:
864 nlink = 0
865 nlink = 0
865 d = os.path.dirname(f)
866 d = os.path.dirname(f)
866 if not os.path.isdir(d):
867 if not os.path.isdir(d):
867 makedirs(d, self.createmode)
868 makedirs(d, self.createmode)
868 if atomictemp:
869 if atomictemp:
869 return atomictempfile(f, mode, self.createmode)
870 return atomictempfile(f, mode, self.createmode)
870 if nlink > 1:
871 if nlink > 1:
871 rename(mktempcopy(f), f)
872 rename(mktempcopy(f), f)
872 fp = posixfile(f, mode)
873 fp = posixfile(f, mode)
873 if nlink == 0:
874 if nlink == 0:
874 self._fixfilemode(f)
875 self._fixfilemode(f)
875 return fp
876 return fp
876
877
877 def symlink(self, src, dst):
878 def symlink(self, src, dst):
878 self.audit_path(dst)
879 self.audit_path(dst)
879 linkname = os.path.join(self.base, dst)
880 linkname = os.path.join(self.base, dst)
880 try:
881 try:
881 os.unlink(linkname)
882 os.unlink(linkname)
882 except OSError:
883 except OSError:
883 pass
884 pass
884
885
885 dirname = os.path.dirname(linkname)
886 dirname = os.path.dirname(linkname)
886 if not os.path.exists(dirname):
887 if not os.path.exists(dirname):
887 makedirs(dirname, self.createmode)
888 makedirs(dirname, self.createmode)
888
889
889 if self._can_symlink:
890 if self._can_symlink:
890 try:
891 try:
891 os.symlink(src, linkname)
892 os.symlink(src, linkname)
892 except OSError, err:
893 except OSError, err:
893 raise OSError(err.errno, _('could not symlink to %r: %s') %
894 raise OSError(err.errno, _('could not symlink to %r: %s') %
894 (src, err.strerror), linkname)
895 (src, err.strerror), linkname)
895 else:
896 else:
896 f = self(dst, "w")
897 f = self(dst, "w")
897 f.write(src)
898 f.write(src)
898 f.close()
899 f.close()
899 self._fixfilemode(dst)
900 self._fixfilemode(dst)
900
901
901 class chunkbuffer(object):
902 class chunkbuffer(object):
902 """Allow arbitrary sized chunks of data to be efficiently read from an
903 """Allow arbitrary sized chunks of data to be efficiently read from an
903 iterator over chunks of arbitrary size."""
904 iterator over chunks of arbitrary size."""
904
905
905 def __init__(self, in_iter):
906 def __init__(self, in_iter):
906 """in_iter is the iterator that's iterating over the input chunks.
907 """in_iter is the iterator that's iterating over the input chunks.
907 targetsize is how big a buffer to try to maintain."""
908 targetsize is how big a buffer to try to maintain."""
908 self.iter = iter(in_iter)
909 self.iter = iter(in_iter)
909 self.buf = ''
910 self.buf = ''
910 self.targetsize = 2**16
911 self.targetsize = 2**16
911
912
912 def read(self, l):
913 def read(self, l):
913 """Read L bytes of data from the iterator of chunks of data.
914 """Read L bytes of data from the iterator of chunks of data.
914 Returns less than L bytes if the iterator runs dry."""
915 Returns less than L bytes if the iterator runs dry."""
915 if l > len(self.buf) and self.iter:
916 if l > len(self.buf) and self.iter:
916 # Clamp to a multiple of self.targetsize
917 # Clamp to a multiple of self.targetsize
917 targetsize = max(l, self.targetsize)
918 targetsize = max(l, self.targetsize)
918 collector = cStringIO.StringIO()
919 collector = cStringIO.StringIO()
919 collector.write(self.buf)
920 collector.write(self.buf)
920 collected = len(self.buf)
921 collected = len(self.buf)
921 for chunk in self.iter:
922 for chunk in self.iter:
922 collector.write(chunk)
923 collector.write(chunk)
923 collected += len(chunk)
924 collected += len(chunk)
924 if collected >= targetsize:
925 if collected >= targetsize:
925 break
926 break
926 if collected < targetsize:
927 if collected < targetsize:
927 self.iter = False
928 self.iter = False
928 self.buf = collector.getvalue()
929 self.buf = collector.getvalue()
929 if len(self.buf) == l:
930 if len(self.buf) == l:
930 s, self.buf = str(self.buf), ''
931 s, self.buf = str(self.buf), ''
931 else:
932 else:
932 s, self.buf = self.buf[:l], buffer(self.buf, l)
933 s, self.buf = self.buf[:l], buffer(self.buf, l)
933 return s
934 return s
934
935
935 def filechunkiter(f, size=65536, limit=None):
936 def filechunkiter(f, size=65536, limit=None):
936 """Create a generator that produces the data in the file size
937 """Create a generator that produces the data in the file size
937 (default 65536) bytes at a time, up to optional limit (default is
938 (default 65536) bytes at a time, up to optional limit (default is
938 to read all data). Chunks may be less than size bytes if the
939 to read all data). Chunks may be less than size bytes if the
939 chunk is the last chunk in the file, or the file is a socket or
940 chunk is the last chunk in the file, or the file is a socket or
940 some other type of file that sometimes reads less data than is
941 some other type of file that sometimes reads less data than is
941 requested."""
942 requested."""
942 assert size >= 0
943 assert size >= 0
943 assert limit is None or limit >= 0
944 assert limit is None or limit >= 0
944 while True:
945 while True:
945 if limit is None: nbytes = size
946 if limit is None: nbytes = size
946 else: nbytes = min(limit, size)
947 else: nbytes = min(limit, size)
947 s = nbytes and f.read(nbytes)
948 s = nbytes and f.read(nbytes)
948 if not s: break
949 if not s: break
949 if limit: limit -= len(s)
950 if limit: limit -= len(s)
950 yield s
951 yield s
951
952
952 def makedate():
953 def makedate():
953 lt = time.localtime()
954 lt = time.localtime()
954 if lt[8] == 1 and time.daylight:
955 if lt[8] == 1 and time.daylight:
955 tz = time.altzone
956 tz = time.altzone
956 else:
957 else:
957 tz = time.timezone
958 tz = time.timezone
958 return time.mktime(lt), tz
959 return time.mktime(lt), tz
959
960
960 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
961 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
961 """represent a (unixtime, offset) tuple as a localized time.
962 """represent a (unixtime, offset) tuple as a localized time.
962 unixtime is seconds since the epoch, and offset is the time zone's
963 unixtime is seconds since the epoch, and offset is the time zone's
963 number of seconds away from UTC. if timezone is false, do not
964 number of seconds away from UTC. if timezone is false, do not
964 append time zone to string."""
965 append time zone to string."""
965 t, tz = date or makedate()
966 t, tz = date or makedate()
966 if "%1" in format or "%2" in format:
967 if "%1" in format or "%2" in format:
967 sign = (tz > 0) and "-" or "+"
968 sign = (tz > 0) and "-" or "+"
968 minutes = abs(tz) // 60
969 minutes = abs(tz) // 60
969 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
970 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
970 format = format.replace("%2", "%02d" % (minutes % 60))
971 format = format.replace("%2", "%02d" % (minutes % 60))
971 s = time.strftime(format, time.gmtime(float(t) - tz))
972 s = time.strftime(format, time.gmtime(float(t) - tz))
972 return s
973 return s
973
974
974 def shortdate(date=None):
975 def shortdate(date=None):
975 """turn (timestamp, tzoff) tuple into iso 8631 date."""
976 """turn (timestamp, tzoff) tuple into iso 8631 date."""
976 return datestr(date, format='%Y-%m-%d')
977 return datestr(date, format='%Y-%m-%d')
977
978
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

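# Illustrative usage (not part of the original module): a "unixtime offset"
# string is returned directly after validation; other strings are matched
# against the known formats via strdate().
#   >>> parsedate('1165432709 0')
#   (1165432709, 0)
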
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within a given number of days before now

    '{date} to {date}' a date range, inclusive

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

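# Illustrative usage (not part of the original module); the exact cutoff of
# the returned predicate depends on the local time zone.
#   >>> after2008 = matchdate('>2008-01-01')
#   >>> after2008(1230000000)  # a timestamp from late December 2008 (UTC)
#   True
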
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

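# Illustrative usage (not part of the original module):
#   >>> shortuser('Bob Jones <bob.jones@example.com>')
#   'bob'
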
def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

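# Illustrative usage (not part of the original module):
#   >>> email('Bob Jones <bob@example.com>')
#   'bob@example.com'
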
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

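# Illustrative usage (not part of the original module); the three trailing
# dots count towards maxlength:
#   >>> ellipsis('0123456789', maxlength=8)
#   '01234...'
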
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

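# Illustrative sketch (not part of the original module); '/srv/repos' is a
# hypothetical directory tree containing Mercurial repositories.
#   for repo in walkrepos('/srv/repos', followsym=True, recurse=True):
#       print repo
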
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

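# Illustrative usage (not part of the original module):
#   >>> bytecount(100000)
#   '97.7 KB'
#   >>> bytecount(1500000)
#   '1.43 MB'
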
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

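# Illustrative usage (not part of the original module):
#   >>> drop_scheme('file', 'file:///tmp/repo')
#   '/tmp/repo'
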
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                try:
                    fd = dev.fileno()
                except AttributeError:
                    continue
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def wrap(line, hangindent, width=None):
    if width is None:
        width = termwidth() - 2
    padding = '\n' + ' ' * hangindent
    return padding.join(textwrap.wrap(line, width=width - hangindent))

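# Illustrative usage (not part of the original module); continuation lines are
# indented by hangindent spaces:
#   >>> wrap('one two three four', 4, width=12)
#   'one two\n    three\n    four'
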
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line
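
# Illustrative usage (not part of the original module):
#   >>> list(iterlines(['a\nb', 'c\n']))
#   ['a', 'b', 'c']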