util: state docstring of rename more precisely
Adrian Buehlmann
r9548:7732606b default
@@ -1,1285 +1,1285 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, random, textwrap
19 import os, stat, time, calendar, random, textwrap
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
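# Example (not part of util.py): a minimal sketch of the self-replacing
# trick above, assuming this module is importable as mercurial.util.
# The first call rebinds sha1/_fastsha1 to the imported implementation,
# so later calls skip the try/except import entirely.
from mercurial import util
util.sha1('some data').hexdigest()   # first call performs the rebinding
util.sha1('some data').hexdigest()   # now goes straight to hashlib.sha1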
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd):
41 def popen2(cmd):
42 # Setting bufsize to -1 lets the system decide the buffer size.
42 # Setting bufsize to -1 lets the system decide the buffer size.
43 # The default for bufsize is 0, meaning unbuffered. This leads to
43 # The default for bufsize is 0, meaning unbuffered. This leads to
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
46 close_fds=closefds,
46 close_fds=closefds,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
48 return p.stdin, p.stdout
48 return p.stdin, p.stdout
49 def popen3(cmd):
49 def popen3(cmd):
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
51 close_fds=closefds,
51 close_fds=closefds,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
53 stderr=subprocess.PIPE)
53 stderr=subprocess.PIPE)
54 return p.stdin, p.stdout, p.stderr
54 return p.stdin, p.stdout, p.stderr
55
55
56 def version():
56 def version():
57 """Return version information if available."""
57 """Return version information if available."""
58 try:
58 try:
59 import __version__
59 import __version__
60 return __version__.version
60 return __version__.version
61 except ImportError:
61 except ImportError:
62 return 'unknown'
62 return 'unknown'
63
63
64 # used by parsedate
64 # used by parsedate
65 defaultdateformats = (
65 defaultdateformats = (
66 '%Y-%m-%d %H:%M:%S',
66 '%Y-%m-%d %H:%M:%S',
67 '%Y-%m-%d %I:%M:%S%p',
67 '%Y-%m-%d %I:%M:%S%p',
68 '%Y-%m-%d %H:%M',
68 '%Y-%m-%d %H:%M',
69 '%Y-%m-%d %I:%M%p',
69 '%Y-%m-%d %I:%M%p',
70 '%Y-%m-%d',
70 '%Y-%m-%d',
71 '%m-%d',
71 '%m-%d',
72 '%m/%d',
72 '%m/%d',
73 '%m/%d/%y',
73 '%m/%d/%y',
74 '%m/%d/%Y',
74 '%m/%d/%Y',
75 '%a %b %d %H:%M:%S %Y',
75 '%a %b %d %H:%M:%S %Y',
76 '%a %b %d %I:%M:%S%p %Y',
76 '%a %b %d %I:%M:%S%p %Y',
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
78 '%b %d %H:%M:%S %Y',
78 '%b %d %H:%M:%S %Y',
79 '%b %d %I:%M:%S%p %Y',
79 '%b %d %I:%M:%S%p %Y',
80 '%b %d %H:%M:%S',
80 '%b %d %H:%M:%S',
81 '%b %d %I:%M:%S%p',
81 '%b %d %I:%M:%S%p',
82 '%b %d %H:%M',
82 '%b %d %H:%M',
83 '%b %d %I:%M%p',
83 '%b %d %I:%M%p',
84 '%b %d %Y',
84 '%b %d %Y',
85 '%b %d',
85 '%b %d',
86 '%H:%M:%S',
86 '%H:%M:%S',
87 '%I:%M:%S%p',
87 '%I:%M:%S%p',
88 '%H:%M',
88 '%H:%M',
89 '%I:%M%p',
89 '%I:%M%p',
90 )
90 )
91
91
92 extendeddateformats = defaultdateformats + (
92 extendeddateformats = defaultdateformats + (
93 "%Y",
93 "%Y",
94 "%Y-%m",
94 "%Y-%m",
95 "%b",
95 "%b",
96 "%b %Y",
96 "%b %Y",
97 )
97 )
98
98
99 def cachefunc(func):
99 def cachefunc(func):
100 '''cache the result of function calls'''
100 '''cache the result of function calls'''
101 # XXX doesn't handle keyword args
101 # XXX doesn't handle keyword args
102 cache = {}
102 cache = {}
103 if func.func_code.co_argcount == 1:
103 if func.func_code.co_argcount == 1:
104 # we gain a small amount of time because
104 # we gain a small amount of time because
105 # we don't need to pack/unpack the list
105 # we don't need to pack/unpack the list
106 def f(arg):
106 def f(arg):
107 if arg not in cache:
107 if arg not in cache:
108 cache[arg] = func(arg)
108 cache[arg] = func(arg)
109 return cache[arg]
109 return cache[arg]
110 else:
110 else:
111 def f(*args):
111 def f(*args):
112 if args not in cache:
112 if args not in cache:
113 cache[args] = func(*args)
113 cache[args] = func(*args)
114 return cache[args]
114 return cache[args]
115
115
116 return f
116 return f
117
117
118 def lrucachefunc(func):
118 def lrucachefunc(func):
119 '''cache most recent results of function calls'''
119 '''cache most recent results of function calls'''
120 cache = {}
120 cache = {}
121 order = []
121 order = []
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 def f(arg):
123 def f(arg):
124 if arg not in cache:
124 if arg not in cache:
125 if len(cache) > 20:
125 if len(cache) > 20:
126 del cache[order.pop(0)]
126 del cache[order.pop(0)]
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 else:
128 else:
129 order.remove(arg)
129 order.remove(arg)
130 order.append(arg)
130 order.append(arg)
131 return cache[arg]
131 return cache[arg]
132 else:
132 else:
133 def f(*args):
133 def f(*args):
134 if args not in cache:
134 if args not in cache:
135 if len(cache) > 20:
135 if len(cache) > 20:
136 del cache[order.pop(0)]
136 del cache[order.pop(0)]
137 cache[args] = func(*args)
137 cache[args] = func(*args)
138 else:
138 else:
139 order.remove(args)
139 order.remove(args)
140 order.append(args)
140 order.append(args)
141 return cache[args]
141 return cache[args]
142
142
143 return f
143 return f
144
144
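# Example (not part of util.py): a usage sketch, assuming the module is
# importable as mercurial.util. cachefunc memoizes every call forever,
# while lrucachefunc keeps only the ~20 most recently used argument sets.
from mercurial import util

def _square(n):
    return n * n                     # stand-in for an expensive computation

cached = util.cachefunc(_square)
cached(7)                            # computed and stored
cached(7)                            # served from the cache

recent = util.lrucachefunc(_square)
for i in range(50):
    recent(i)                        # entries beyond the ~20 newest are evicted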
145 class propertycache(object):
145 class propertycache(object):
146 def __init__(self, func):
146 def __init__(self, func):
147 self.func = func
147 self.func = func
148 self.name = func.__name__
148 self.name = func.__name__
149 def __get__(self, obj, type=None):
149 def __get__(self, obj, type=None):
150 result = self.func(obj)
150 result = self.func(obj)
151 setattr(obj, self.name, result)
151 setattr(obj, self.name, result)
152 return result
152 return result
153
153
154 def pipefilter(s, cmd):
154 def pipefilter(s, cmd):
155 '''filter string S through command CMD, returning its output'''
155 '''filter string S through command CMD, returning its output'''
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
158 pout, perr = p.communicate(s)
158 pout, perr = p.communicate(s)
159 return pout
159 return pout
160
160
161 def tempfilter(s, cmd):
161 def tempfilter(s, cmd):
162 '''filter string S through a pair of temporary files with CMD.
162 '''filter string S through a pair of temporary files with CMD.
163 CMD is used as a template to create the real command to be run,
163 CMD is used as a template to create the real command to be run,
164 with the strings INFILE and OUTFILE replaced by the real names of
164 with the strings INFILE and OUTFILE replaced by the real names of
165 the temporary files generated.'''
165 the temporary files generated.'''
166 inname, outname = None, None
166 inname, outname = None, None
167 try:
167 try:
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
169 fp = os.fdopen(infd, 'wb')
169 fp = os.fdopen(infd, 'wb')
170 fp.write(s)
170 fp.write(s)
171 fp.close()
171 fp.close()
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
173 os.close(outfd)
173 os.close(outfd)
174 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('INFILE', inname)
175 cmd = cmd.replace('OUTFILE', outname)
175 cmd = cmd.replace('OUTFILE', outname)
176 code = os.system(cmd)
176 code = os.system(cmd)
177 if sys.platform == 'OpenVMS' and code & 1:
177 if sys.platform == 'OpenVMS' and code & 1:
178 code = 0
178 code = 0
179 if code: raise Abort(_("command '%s' failed: %s") %
179 if code: raise Abort(_("command '%s' failed: %s") %
180 (cmd, explain_exit(code)))
180 (cmd, explain_exit(code)))
181 return open(outname, 'rb').read()
181 return open(outname, 'rb').read()
182 finally:
182 finally:
183 try:
183 try:
184 if inname: os.unlink(inname)
184 if inname: os.unlink(inname)
185 except: pass
185 except: pass
186 try:
186 try:
187 if outname: os.unlink(outname)
187 if outname: os.unlink(outname)
188 except: pass
188 except: pass
189
189
190 filtertable = {
190 filtertable = {
191 'tempfile:': tempfilter,
191 'tempfile:': tempfilter,
192 'pipe:': pipefilter,
192 'pipe:': pipefilter,
193 }
193 }
194
194
195 def filter(s, cmd):
195 def filter(s, cmd):
196 "filter a string through a command that transforms its input to its output"
196 "filter a string through a command that transforms its input to its output"
197 for name, fn in filtertable.iteritems():
197 for name, fn in filtertable.iteritems():
198 if cmd.startswith(name):
198 if cmd.startswith(name):
199 return fn(s, cmd[len(name):].lstrip())
199 return fn(s, cmd[len(name):].lstrip())
200 return pipefilter(s, cmd)
200 return pipefilter(s, cmd)
201
201
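# Example (not part of util.py): a sketch of the filter dispatch above,
# assuming a POSIX shell with 'tr' available.
from mercurial import util
util.filter('hello\n', 'pipe: tr a-z A-Z')   # explicit pipe: prefix -> 'HELLO\n'
util.filter('hello\n', 'tr a-z A-Z')         # no prefix, pipefilter is the default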
202 def binary(s):
202 def binary(s):
203 """return true if a string is binary data"""
203 """return true if a string is binary data"""
204 return bool(s and '\0' in s)
204 return bool(s and '\0' in s)
205
205
206 def increasingchunks(source, min=1024, max=65536):
206 def increasingchunks(source, min=1024, max=65536):
207 '''return no less than min bytes per chunk while data remains,
207 '''return no less than min bytes per chunk while data remains,
208 doubling min after each chunk until it reaches max'''
208 doubling min after each chunk until it reaches max'''
209 def log2(x):
209 def log2(x):
210 if not x:
210 if not x:
211 return 0
211 return 0
212 i = 0
212 i = 0
213 while x:
213 while x:
214 x >>= 1
214 x >>= 1
215 i += 1
215 i += 1
216 return i - 1
216 return i - 1
217
217
218 buf = []
218 buf = []
219 blen = 0
219 blen = 0
220 for chunk in source:
220 for chunk in source:
221 buf.append(chunk)
221 buf.append(chunk)
222 blen += len(chunk)
222 blen += len(chunk)
223 if blen >= min:
223 if blen >= min:
224 if min < max:
224 if min < max:
225 min = min << 1
225 min = min << 1
226 nmin = 1 << log2(blen)
226 nmin = 1 << log2(blen)
227 if nmin > min:
227 if nmin > min:
228 min = nmin
228 min = nmin
229 if min > max:
229 if min > max:
230 min = max
230 min = max
231 yield ''.join(buf)
231 yield ''.join(buf)
232 blen = 0
232 blen = 0
233 buf = []
233 buf = []
234 if buf:
234 if buf:
235 yield ''.join(buf)
235 yield ''.join(buf)
236
236
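# Example (not part of util.py): a sketch of how increasingchunks regroups
# many tiny chunks into progressively larger ones (sizes are illustrative).
from mercurial import util
tiny = ['x' * 100 for i in xrange(100)]          # 100 chunks of 100 bytes
sizes = [len(c) for c in util.increasingchunks(tiny, min=1024, max=4096)]
# sizes climb from roughly 1KB towards the 4KB cap while data keeps flowing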
237 Abort = error.Abort
237 Abort = error.Abort
238
238
239 def always(fn): return True
239 def always(fn): return True
240 def never(fn): return False
240 def never(fn): return False
241
241
242 def pathto(root, n1, n2):
242 def pathto(root, n1, n2):
243 '''return the relative path from one place to another.
243 '''return the relative path from one place to another.
244 root should use os.sep to separate directories
244 root should use os.sep to separate directories
245 n1 should use os.sep to separate directories
245 n1 should use os.sep to separate directories
246 n2 should use "/" to separate directories
246 n2 should use "/" to separate directories
247 returns an os.sep-separated path.
247 returns an os.sep-separated path.
248
248
249 If n1 is a relative path, it's assumed it's
249 If n1 is a relative path, it's assumed it's
250 relative to root.
250 relative to root.
251 n2 should always be relative to root.
251 n2 should always be relative to root.
252 '''
252 '''
253 if not n1: return localpath(n2)
253 if not n1: return localpath(n2)
254 if os.path.isabs(n1):
254 if os.path.isabs(n1):
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
256 return os.path.join(root, localpath(n2))
256 return os.path.join(root, localpath(n2))
257 n2 = '/'.join((pconvert(root), n2))
257 n2 = '/'.join((pconvert(root), n2))
258 a, b = splitpath(n1), n2.split('/')
258 a, b = splitpath(n1), n2.split('/')
259 a.reverse()
259 a.reverse()
260 b.reverse()
260 b.reverse()
261 while a and b and a[-1] == b[-1]:
261 while a and b and a[-1] == b[-1]:
262 a.pop()
262 a.pop()
263 b.pop()
263 b.pop()
264 b.reverse()
264 b.reverse()
265 return os.sep.join((['..'] * len(a)) + b) or '.'
265 return os.sep.join((['..'] * len(a)) + b) or '.'
266
266
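# Example (not part of util.py): a sketch on a POSIX system where os.sep is '/'.
from mercurial import util
util.pathto('/repo', 'a/b', 'a/c/f')   # -> '../c/f'
util.pathto('/repo', '', 'a/c/f')      # empty n1 -> local form of n2, 'a/c/f'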
267 def canonpath(root, cwd, myname):
267 def canonpath(root, cwd, myname):
268 """return the canonical path of myname, given cwd and root"""
268 """return the canonical path of myname, given cwd and root"""
269 if endswithsep(root):
269 if endswithsep(root):
270 rootsep = root
270 rootsep = root
271 else:
271 else:
272 rootsep = root + os.sep
272 rootsep = root + os.sep
273 name = myname
273 name = myname
274 if not os.path.isabs(name):
274 if not os.path.isabs(name):
275 name = os.path.join(root, cwd, name)
275 name = os.path.join(root, cwd, name)
276 name = os.path.normpath(name)
276 name = os.path.normpath(name)
277 audit_path = path_auditor(root)
277 audit_path = path_auditor(root)
278 if name != rootsep and name.startswith(rootsep):
278 if name != rootsep and name.startswith(rootsep):
279 name = name[len(rootsep):]
279 name = name[len(rootsep):]
280 audit_path(name)
280 audit_path(name)
281 return pconvert(name)
281 return pconvert(name)
282 elif name == root:
282 elif name == root:
283 return ''
283 return ''
284 else:
284 else:
285 # Determine whether `name' is in the hierarchy at or beneath `root',
285 # Determine whether `name' is in the hierarchy at or beneath `root',
286 # by iterating name=dirname(name) until that causes no change (can't
286 # by iterating name=dirname(name) until that causes no change (can't
287 # check name == '/', because that doesn't work on windows). For each
287 # check name == '/', because that doesn't work on windows). For each
288 # `name', compare dev/inode numbers. If they match, the list `rel'
288 # `name', compare dev/inode numbers. If they match, the list `rel'
289 # holds the reversed list of components making up the relative file
289 # holds the reversed list of components making up the relative file
290 # name we want.
290 # name we want.
291 root_st = os.stat(root)
291 root_st = os.stat(root)
292 rel = []
292 rel = []
293 while True:
293 while True:
294 try:
294 try:
295 name_st = os.stat(name)
295 name_st = os.stat(name)
296 except OSError:
296 except OSError:
297 break
297 break
298 if samestat(name_st, root_st):
298 if samestat(name_st, root_st):
299 if not rel:
299 if not rel:
300 # name was actually the same as root (maybe a symlink)
300 # name was actually the same as root (maybe a symlink)
301 return ''
301 return ''
302 rel.reverse()
302 rel.reverse()
303 name = os.path.join(*rel)
303 name = os.path.join(*rel)
304 audit_path(name)
304 audit_path(name)
305 return pconvert(name)
305 return pconvert(name)
306 dirname, basename = os.path.split(name)
306 dirname, basename = os.path.split(name)
307 rel.append(basename)
307 rel.append(basename)
308 if dirname == name:
308 if dirname == name:
309 break
309 break
310 name = dirname
310 name = dirname
311
311
312 raise Abort('%s not under root' % myname)
312 raise Abort('%s not under root' % myname)
313
313
314 _hgexecutable = None
314 _hgexecutable = None
315
315
316 def main_is_frozen():
316 def main_is_frozen():
317 """return True if we are a frozen executable.
317 """return True if we are a frozen executable.
318
318
319 The code supports py2exe (most common, Windows only) and tools/freeze
319 The code supports py2exe (most common, Windows only) and tools/freeze
320 (portable, not much used).
320 (portable, not much used).
321 """
321 """
322 return (hasattr(sys, "frozen") or # new py2exe
322 return (hasattr(sys, "frozen") or # new py2exe
323 hasattr(sys, "importers") or # old py2exe
323 hasattr(sys, "importers") or # old py2exe
324 imp.is_frozen("__main__")) # tools/freeze
324 imp.is_frozen("__main__")) # tools/freeze
325
325
326 def hgexecutable():
326 def hgexecutable():
327 """return location of the 'hg' executable.
327 """return location of the 'hg' executable.
328
328
329 Defaults to $HG or 'hg' in the search path.
329 Defaults to $HG or 'hg' in the search path.
330 """
330 """
331 if _hgexecutable is None:
331 if _hgexecutable is None:
332 hg = os.environ.get('HG')
332 hg = os.environ.get('HG')
333 if hg:
333 if hg:
334 set_hgexecutable(hg)
334 set_hgexecutable(hg)
335 elif main_is_frozen():
335 elif main_is_frozen():
336 set_hgexecutable(sys.executable)
336 set_hgexecutable(sys.executable)
337 else:
337 else:
338 set_hgexecutable(find_exe('hg') or 'hg')
338 set_hgexecutable(find_exe('hg') or 'hg')
339 return _hgexecutable
339 return _hgexecutable
340
340
341 def set_hgexecutable(path):
341 def set_hgexecutable(path):
342 """set location of the 'hg' executable"""
342 """set location of the 'hg' executable"""
343 global _hgexecutable
343 global _hgexecutable
344 _hgexecutable = path
344 _hgexecutable = path
345
345
346 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
346 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
347 '''enhanced shell command execution.
347 '''enhanced shell command execution.
348 run with environment maybe modified, maybe in different dir.
348 run with environment maybe modified, maybe in different dir.
349
349
350 if command fails and onerr is None, return status. if ui object,
350 if command fails and onerr is None, return status. if ui object,
351 print error message and return status, else raise onerr object as
351 print error message and return status, else raise onerr object as
352 exception.'''
352 exception.'''
353 def py2shell(val):
353 def py2shell(val):
354 'convert python object into string that is useful to shell'
354 'convert python object into string that is useful to shell'
355 if val is None or val is False:
355 if val is None or val is False:
356 return '0'
356 return '0'
357 if val is True:
357 if val is True:
358 return '1'
358 return '1'
359 return str(val)
359 return str(val)
360 origcmd = cmd
360 origcmd = cmd
361 if os.name == 'nt':
361 if os.name == 'nt':
362 cmd = '"%s"' % cmd
362 cmd = '"%s"' % cmd
363 env = dict(os.environ)
363 env = dict(os.environ)
364 env.update((k, py2shell(v)) for k, v in environ.iteritems())
364 env.update((k, py2shell(v)) for k, v in environ.iteritems())
365 env['HG'] = hgexecutable()
365 env['HG'] = hgexecutable()
366 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
366 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
367 env=env, cwd=cwd)
367 env=env, cwd=cwd)
368 if sys.platform == 'OpenVMS' and rc & 1:
368 if sys.platform == 'OpenVMS' and rc & 1:
369 rc = 0
369 rc = 0
370 if rc and onerr:
370 if rc and onerr:
371 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
371 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
372 explain_exit(rc)[0])
372 explain_exit(rc)[0])
373 if errprefix:
373 if errprefix:
374 errmsg = '%s: %s' % (errprefix, errmsg)
374 errmsg = '%s: %s' % (errprefix, errmsg)
375 try:
375 try:
376 onerr.warn(errmsg + '\n')
376 onerr.warn(errmsg + '\n')
377 except AttributeError:
377 except AttributeError:
378 raise onerr(errmsg)
378 raise onerr(errmsg)
379 return rc
379 return rc
380
380
381 def checksignature(func):
381 def checksignature(func):
382 '''wrap a function with code to check for calling errors'''
382 '''wrap a function with code to check for calling errors'''
383 def check(*args, **kwargs):
383 def check(*args, **kwargs):
384 try:
384 try:
385 return func(*args, **kwargs)
385 return func(*args, **kwargs)
386 except TypeError:
386 except TypeError:
387 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
387 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
388 raise error.SignatureError
388 raise error.SignatureError
389 raise
389 raise
390
390
391 return check
391 return check
392
392
393 # os.path.lexists is not available on python2.3
393 # os.path.lexists is not available on python2.3
394 def lexists(filename):
394 def lexists(filename):
395 "test whether a file with this name exists. does not follow symlinks"
395 "test whether a file with this name exists. does not follow symlinks"
396 try:
396 try:
397 os.lstat(filename)
397 os.lstat(filename)
398 except:
398 except:
399 return False
399 return False
400 return True
400 return True
401
401
402 def rename(src, dst):
402 def rename(src, dst):
403 - """forcibly rename a file"""
403 + '''atomically rename file src to dst, replacing dst if it exists'''
404 try:
404 try:
405 os.rename(src, dst)
405 os.rename(src, dst)
406 except OSError, err: # FIXME: check err (EEXIST ?)
406 except OSError, err: # FIXME: check err (EEXIST ?)
407
407
408 # On windows, rename to existing file is not allowed, so we
408 # On windows, rename to existing file is not allowed, so we
409 # must delete destination first. But if a file is open, unlink
409 # must delete destination first. But if a file is open, unlink
410 # schedules it for delete but does not delete it. Rename
410 # schedules it for delete but does not delete it. Rename
411 # happens immediately even for open files, so we rename
411 # happens immediately even for open files, so we rename
412 # destination to a temporary name, then delete that. Then
412 # destination to a temporary name, then delete that. Then
413 # rename is safe to do.
413 # rename is safe to do.
414 # The temporary name is chosen at random to avoid the situation
414 # The temporary name is chosen at random to avoid the situation
415 # where a file is left lying around from a previous aborted run.
415 # where a file is left lying around from a previous aborted run.
416 # The usual race condition this introduces can't be avoided as
416 # The usual race condition this introduces can't be avoided as
417 # we need the name to rename into, and not the file itself. Due
417 # we need the name to rename into, and not the file itself. Due
418 # to the nature of the operation however, any races will at worst
418 # to the nature of the operation however, any races will at worst
419 # lead to the rename failing and the current operation aborting.
419 # lead to the rename failing and the current operation aborting.
420
420
421 def tempname(prefix):
421 def tempname(prefix):
422 for tries in xrange(10):
422 for tries in xrange(10):
423 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
423 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
424 if not os.path.exists(temp):
424 if not os.path.exists(temp):
425 return temp
425 return temp
426 raise IOError, (errno.EEXIST, "No usable temporary filename found")
426 raise IOError, (errno.EEXIST, "No usable temporary filename found")
427
427
428 temp = tempname(dst)
428 temp = tempname(dst)
429 os.rename(dst, temp)
429 os.rename(dst, temp)
430 try:
430 try:
431 os.unlink(temp)
431 os.unlink(temp)
432 except:
432 except:
433 # Some rude AV-scanners on Windows may cause the unlink to
433 # Some rude AV-scanners on Windows may cause the unlink to
434 # fail. Not aborting here just leaks the temp file, whereas
434 # fail. Not aborting here just leaks the temp file, whereas
435 # aborting at this point may leave serious inconsistencies.
435 # aborting at this point may leave serious inconsistencies.
436 # Ideally, we would notify the user here.
436 # Ideally, we would notify the user here.
437 pass
437 pass
438 os.rename(src, dst)
438 os.rename(src, dst)
439
439
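# Example (not part of util.py): the sharpened docstring above matches how
# rename() is used in practice -- write the new content to a sibling file,
# then replace the old one in a single step. The file names are made up.
from mercurial import util
fp = open('config.new', 'wb')
fp.write('[ui]\nusername = example\n')
fp.close()
util.rename('config.new', 'config')    # replaces 'config' even if it exists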
440 def unlink(f):
440 def unlink(f):
441 """unlink and remove the directory if it is empty"""
441 """unlink and remove the directory if it is empty"""
442 os.unlink(f)
442 os.unlink(f)
443 # try removing directories that might now be empty
443 # try removing directories that might now be empty
444 try:
444 try:
445 os.removedirs(os.path.dirname(f))
445 os.removedirs(os.path.dirname(f))
446 except OSError:
446 except OSError:
447 pass
447 pass
448
448
449 def copyfile(src, dest):
449 def copyfile(src, dest):
450 "copy a file, preserving mode and atime/mtime"
450 "copy a file, preserving mode and atime/mtime"
451 if os.path.islink(src):
451 if os.path.islink(src):
452 try:
452 try:
453 os.unlink(dest)
453 os.unlink(dest)
454 except:
454 except:
455 pass
455 pass
456 os.symlink(os.readlink(src), dest)
456 os.symlink(os.readlink(src), dest)
457 else:
457 else:
458 try:
458 try:
459 shutil.copyfile(src, dest)
459 shutil.copyfile(src, dest)
460 shutil.copystat(src, dest)
460 shutil.copystat(src, dest)
461 except shutil.Error, inst:
461 except shutil.Error, inst:
462 raise Abort(str(inst))
462 raise Abort(str(inst))
463
463
464 def copyfiles(src, dst, hardlink=None):
464 def copyfiles(src, dst, hardlink=None):
465 """Copy a directory tree using hardlinks if possible"""
465 """Copy a directory tree using hardlinks if possible"""
466
466
467 if hardlink is None:
467 if hardlink is None:
468 hardlink = (os.stat(src).st_dev ==
468 hardlink = (os.stat(src).st_dev ==
469 os.stat(os.path.dirname(dst)).st_dev)
469 os.stat(os.path.dirname(dst)).st_dev)
470
470
471 if os.path.isdir(src):
471 if os.path.isdir(src):
472 os.mkdir(dst)
472 os.mkdir(dst)
473 for name, kind in osutil.listdir(src):
473 for name, kind in osutil.listdir(src):
474 srcname = os.path.join(src, name)
474 srcname = os.path.join(src, name)
475 dstname = os.path.join(dst, name)
475 dstname = os.path.join(dst, name)
476 copyfiles(srcname, dstname, hardlink)
476 copyfiles(srcname, dstname, hardlink)
477 else:
477 else:
478 if hardlink:
478 if hardlink:
479 try:
479 try:
480 os_link(src, dst)
480 os_link(src, dst)
481 except (IOError, OSError):
481 except (IOError, OSError):
482 hardlink = False
482 hardlink = False
483 shutil.copy(src, dst)
483 shutil.copy(src, dst)
484 else:
484 else:
485 shutil.copy(src, dst)
485 shutil.copy(src, dst)
486
486
487 class path_auditor(object):
487 class path_auditor(object):
488 '''ensure that a filesystem path contains no banned components.
488 '''ensure that a filesystem path contains no banned components.
489 the following properties of a path are checked:
489 the following properties of a path are checked:
490
490
491 - under top-level .hg
491 - under top-level .hg
492 - starts at the root of a windows drive
492 - starts at the root of a windows drive
493 - contains ".."
493 - contains ".."
494 - traverses a symlink (e.g. a/symlink_here/b)
494 - traverses a symlink (e.g. a/symlink_here/b)
495 - inside a nested repository'''
495 - inside a nested repository'''
496
496
497 def __init__(self, root):
497 def __init__(self, root):
498 self.audited = set()
498 self.audited = set()
499 self.auditeddir = set()
499 self.auditeddir = set()
500 self.root = root
500 self.root = root
501
501
502 def __call__(self, path):
502 def __call__(self, path):
503 if path in self.audited:
503 if path in self.audited:
504 return
504 return
505 normpath = os.path.normcase(path)
505 normpath = os.path.normcase(path)
506 parts = splitpath(normpath)
506 parts = splitpath(normpath)
507 if (os.path.splitdrive(path)[0]
507 if (os.path.splitdrive(path)[0]
508 or parts[0].lower() in ('.hg', '.hg.', '')
508 or parts[0].lower() in ('.hg', '.hg.', '')
509 or os.pardir in parts):
509 or os.pardir in parts):
510 raise Abort(_("path contains illegal component: %s") % path)
510 raise Abort(_("path contains illegal component: %s") % path)
511 if '.hg' in path.lower():
511 if '.hg' in path.lower():
512 lparts = [p.lower() for p in parts]
512 lparts = [p.lower() for p in parts]
513 for p in '.hg', '.hg.':
513 for p in '.hg', '.hg.':
514 if p in lparts[1:]:
514 if p in lparts[1:]:
515 pos = lparts.index(p)
515 pos = lparts.index(p)
516 base = os.path.join(*parts[:pos])
516 base = os.path.join(*parts[:pos])
517 raise Abort(_('path %r is inside repo %r') % (path, base))
517 raise Abort(_('path %r is inside repo %r') % (path, base))
518 def check(prefix):
518 def check(prefix):
519 curpath = os.path.join(self.root, prefix)
519 curpath = os.path.join(self.root, prefix)
520 try:
520 try:
521 st = os.lstat(curpath)
521 st = os.lstat(curpath)
522 except OSError, err:
522 except OSError, err:
523 # EINVAL can be raised as invalid path syntax under win32.
523 # EINVAL can be raised as invalid path syntax under win32.
524 # They must be ignored so that patterns can still be checked.
524 # They must be ignored so that patterns can still be checked.
525 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
525 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
526 raise
526 raise
527 else:
527 else:
528 if stat.S_ISLNK(st.st_mode):
528 if stat.S_ISLNK(st.st_mode):
529 raise Abort(_('path %r traverses symbolic link %r') %
529 raise Abort(_('path %r traverses symbolic link %r') %
530 (path, prefix))
530 (path, prefix))
531 elif (stat.S_ISDIR(st.st_mode) and
531 elif (stat.S_ISDIR(st.st_mode) and
532 os.path.isdir(os.path.join(curpath, '.hg'))):
532 os.path.isdir(os.path.join(curpath, '.hg'))):
533 raise Abort(_('path %r is inside repo %r') %
533 raise Abort(_('path %r is inside repo %r') %
534 (path, prefix))
534 (path, prefix))
535 parts.pop()
535 parts.pop()
536 prefixes = []
536 prefixes = []
537 while parts:
537 while parts:
538 prefix = os.sep.join(parts)
538 prefix = os.sep.join(parts)
539 if prefix in self.auditeddir:
539 if prefix in self.auditeddir:
540 break
540 break
541 check(prefix)
541 check(prefix)
542 prefixes.append(prefix)
542 prefixes.append(prefix)
543 parts.pop()
543 parts.pop()
544
544
545 self.audited.add(path)
545 self.audited.add(path)
546 # only add prefixes to the cache after checking everything: we don't
546 # only add prefixes to the cache after checking everything: we don't
547 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
547 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
548 self.auditeddir.update(prefixes)
548 self.auditeddir.update(prefixes)
549
549
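# Example (not part of util.py): a sketch of the checks above; error.Abort
# is the exception raised on a banned component.
from mercurial import util, error
audit = util.path_auditor('/repo')
audit('docs/readme.txt')               # acceptable relative path
try:
    audit('.hg/hgrc')                  # rejected: top-level .hg component
except error.Abort:
    pass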
550 def nlinks(pathname):
550 def nlinks(pathname):
551 """Return number of hardlinks for the given file."""
551 """Return number of hardlinks for the given file."""
552 return os.lstat(pathname).st_nlink
552 return os.lstat(pathname).st_nlink
553
553
554 if hasattr(os, 'link'):
554 if hasattr(os, 'link'):
555 os_link = os.link
555 os_link = os.link
556 else:
556 else:
557 def os_link(src, dst):
557 def os_link(src, dst):
558 raise OSError(0, _("Hardlinks not supported"))
558 raise OSError(0, _("Hardlinks not supported"))
559
559
560 def lookup_reg(key, name=None, scope=None):
560 def lookup_reg(key, name=None, scope=None):
561 return None
561 return None
562
562
563 if os.name == 'nt':
563 if os.name == 'nt':
564 from windows import *
564 from windows import *
565 else:
565 else:
566 from posix import *
566 from posix import *
567
567
568 def makelock(info, pathname):
568 def makelock(info, pathname):
569 try:
569 try:
570 return os.symlink(info, pathname)
570 return os.symlink(info, pathname)
571 except OSError, why:
571 except OSError, why:
572 if why.errno == errno.EEXIST:
572 if why.errno == errno.EEXIST:
573 raise
573 raise
574 except AttributeError: # no symlink in os
574 except AttributeError: # no symlink in os
575 pass
575 pass
576
576
577 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
577 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
578 os.write(ld, info)
578 os.write(ld, info)
579 os.close(ld)
579 os.close(ld)
580
580
581 def readlock(pathname):
581 def readlock(pathname):
582 try:
582 try:
583 return os.readlink(pathname)
583 return os.readlink(pathname)
584 except OSError, why:
584 except OSError, why:
585 if why.errno not in (errno.EINVAL, errno.ENOSYS):
585 if why.errno not in (errno.EINVAL, errno.ENOSYS):
586 raise
586 raise
587 except AttributeError: # no symlink in os
587 except AttributeError: # no symlink in os
588 pass
588 pass
589 return posixfile(pathname).read()
589 return posixfile(pathname).read()
590
590
591 def fstat(fp):
591 def fstat(fp):
592 '''stat file object that may not have fileno method.'''
592 '''stat file object that may not have fileno method.'''
593 try:
593 try:
594 return os.fstat(fp.fileno())
594 return os.fstat(fp.fileno())
595 except AttributeError:
595 except AttributeError:
596 return os.stat(fp.name)
596 return os.stat(fp.name)
597
597
598 # File system features
598 # File system features
599
599
600 def checkcase(path):
600 def checkcase(path):
601 """
601 """
602 Check whether the given path is on a case-sensitive filesystem
602 Check whether the given path is on a case-sensitive filesystem
603
603
604 Requires a path (like /foo/.hg) ending with a foldable final
604 Requires a path (like /foo/.hg) ending with a foldable final
605 directory component.
605 directory component.
606 """
606 """
607 s1 = os.stat(path)
607 s1 = os.stat(path)
608 d, b = os.path.split(path)
608 d, b = os.path.split(path)
609 p2 = os.path.join(d, b.upper())
609 p2 = os.path.join(d, b.upper())
610 if path == p2:
610 if path == p2:
611 p2 = os.path.join(d, b.lower())
611 p2 = os.path.join(d, b.lower())
612 try:
612 try:
613 s2 = os.stat(p2)
613 s2 = os.stat(p2)
614 if s2 == s1:
614 if s2 == s1:
615 return False
615 return False
616 return True
616 return True
617 except:
617 except:
618 return True
618 return True
619
619
620 _fspathcache = {}
620 _fspathcache = {}
621 def fspath(name, root):
621 def fspath(name, root):
622 '''Get name in the case stored in the filesystem
622 '''Get name in the case stored in the filesystem
623
623
624 The name is either relative to root, or it is an absolute path starting
624 The name is either relative to root, or it is an absolute path starting
625 with root. Note that this function is unnecessary, and should not be
625 with root. Note that this function is unnecessary, and should not be
626 called, for case-sensitive filesystems (simply because it's expensive).
626 called, for case-sensitive filesystems (simply because it's expensive).
627 '''
627 '''
628 # If name is absolute, make it relative
628 # If name is absolute, make it relative
629 if name.lower().startswith(root.lower()):
629 if name.lower().startswith(root.lower()):
630 l = len(root)
630 l = len(root)
631 if name[l] == os.sep or name[l] == os.altsep:
631 if name[l] == os.sep or name[l] == os.altsep:
632 l = l + 1
632 l = l + 1
633 name = name[l:]
633 name = name[l:]
634
634
635 if not os.path.exists(os.path.join(root, name)):
635 if not os.path.exists(os.path.join(root, name)):
636 return None
636 return None
637
637
638 seps = os.sep
638 seps = os.sep
639 if os.altsep:
639 if os.altsep:
640 seps = seps + os.altsep
640 seps = seps + os.altsep
641 # Protect backslashes. This gets silly very quickly.
641 # Protect backslashes. This gets silly very quickly.
642 seps.replace('\\','\\\\')
642 seps.replace('\\','\\\\')
643 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
643 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
644 dir = os.path.normcase(os.path.normpath(root))
644 dir = os.path.normcase(os.path.normpath(root))
645 result = []
645 result = []
646 for part, sep in pattern.findall(name):
646 for part, sep in pattern.findall(name):
647 if sep:
647 if sep:
648 result.append(sep)
648 result.append(sep)
649 continue
649 continue
650
650
651 if dir not in _fspathcache:
651 if dir not in _fspathcache:
652 _fspathcache[dir] = os.listdir(dir)
652 _fspathcache[dir] = os.listdir(dir)
653 contents = _fspathcache[dir]
653 contents = _fspathcache[dir]
654
654
655 lpart = part.lower()
655 lpart = part.lower()
656 lenp = len(part)
656 lenp = len(part)
657 for n in contents:
657 for n in contents:
658 if lenp == len(n) and n.lower() == lpart:
658 if lenp == len(n) and n.lower() == lpart:
659 result.append(n)
659 result.append(n)
660 break
660 break
661 else:
661 else:
662 # Cannot happen, as the file exists!
662 # Cannot happen, as the file exists!
663 result.append(part)
663 result.append(part)
664 dir = os.path.join(dir, lpart)
664 dir = os.path.join(dir, lpart)
665
665
666 return ''.join(result)
666 return ''.join(result)
667
667
668 def checkexec(path):
668 def checkexec(path):
669 """
669 """
670 Check whether the given path is on a filesystem with UNIX-like exec flags
670 Check whether the given path is on a filesystem with UNIX-like exec flags
671
671
672 Requires a directory (like /foo/.hg)
672 Requires a directory (like /foo/.hg)
673 """
673 """
674
674
675 # VFAT on some Linux versions can flip mode but it doesn't persist
675 # VFAT on some Linux versions can flip mode but it doesn't persist
676 # a FS remount. Frequently we can detect it if files are created
676 # a FS remount. Frequently we can detect it if files are created
677 # with exec bit on.
677 # with exec bit on.
678
678
679 try:
679 try:
680 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
680 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
681 fh, fn = tempfile.mkstemp("", "", path)
681 fh, fn = tempfile.mkstemp("", "", path)
682 try:
682 try:
683 os.close(fh)
683 os.close(fh)
684 m = os.stat(fn).st_mode & 0777
684 m = os.stat(fn).st_mode & 0777
685 new_file_has_exec = m & EXECFLAGS
685 new_file_has_exec = m & EXECFLAGS
686 os.chmod(fn, m ^ EXECFLAGS)
686 os.chmod(fn, m ^ EXECFLAGS)
687 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
687 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
688 finally:
688 finally:
689 os.unlink(fn)
689 os.unlink(fn)
690 except (IOError, OSError):
690 except (IOError, OSError):
691 # we don't care, the user probably won't be able to commit anyway
691 # we don't care, the user probably won't be able to commit anyway
692 return False
692 return False
693 return not (new_file_has_exec or exec_flags_cannot_flip)
693 return not (new_file_has_exec or exec_flags_cannot_flip)
694
694
695 def checklink(path):
695 def checklink(path):
696 """check whether the given path is on a symlink-capable filesystem"""
696 """check whether the given path is on a symlink-capable filesystem"""
697 # mktemp is not racy because symlink creation will fail if the
697 # mktemp is not racy because symlink creation will fail if the
698 # file already exists
698 # file already exists
699 name = tempfile.mktemp(dir=path)
699 name = tempfile.mktemp(dir=path)
700 try:
700 try:
701 os.symlink(".", name)
701 os.symlink(".", name)
702 os.unlink(name)
702 os.unlink(name)
703 return True
703 return True
704 except (OSError, AttributeError):
704 except (OSError, AttributeError):
705 return False
705 return False
706
706
707 def needbinarypatch():
707 def needbinarypatch():
708 """return True if patches should be applied in binary mode by default."""
708 """return True if patches should be applied in binary mode by default."""
709 return os.name == 'nt'
709 return os.name == 'nt'
710
710
711 def endswithsep(path):
711 def endswithsep(path):
712 '''Check path ends with os.sep or os.altsep.'''
712 '''Check path ends with os.sep or os.altsep.'''
713 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
713 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
714
714
715 def splitpath(path):
715 def splitpath(path):
716 '''Split path by os.sep.
716 '''Split path by os.sep.
717 Note that this function does not use os.altsep because this is
717 Note that this function does not use os.altsep because this is
718 just an alternative to a simple "xxx.split(os.sep)".
718 just an alternative to a simple "xxx.split(os.sep)".
719 It is recommended to use os.path.normpath() before using this
719 It is recommended to use os.path.normpath() before using this
720 function if needed.'''
720 function if needed.'''
721 return path.split(os.sep)
721 return path.split(os.sep)
722
722
723 def gui():
723 def gui():
724 '''Are we running in a GUI?'''
724 '''Are we running in a GUI?'''
725 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
725 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
726
726
727 def mktempcopy(name, emptyok=False, createmode=None):
727 def mktempcopy(name, emptyok=False, createmode=None):
728 """Create a temporary file with the same contents from name
728 """Create a temporary file with the same contents from name
729
729
730 The permission bits are copied from the original file.
730 The permission bits are copied from the original file.
731
731
732 If the temporary file is going to be truncated immediately, you
732 If the temporary file is going to be truncated immediately, you
733 can use emptyok=True as an optimization.
733 can use emptyok=True as an optimization.
734
734
735 Returns the name of the temporary file.
735 Returns the name of the temporary file.
736 """
736 """
737 d, fn = os.path.split(name)
737 d, fn = os.path.split(name)
738 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
738 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
739 os.close(fd)
739 os.close(fd)
740 # Temporary files are created with mode 0600, which is usually not
740 # Temporary files are created with mode 0600, which is usually not
741 # what we want. If the original file already exists, just copy
741 # what we want. If the original file already exists, just copy
742 # its mode. Otherwise, manually obey umask.
742 # its mode. Otherwise, manually obey umask.
743 try:
743 try:
744 st_mode = os.lstat(name).st_mode & 0777
744 st_mode = os.lstat(name).st_mode & 0777
745 except OSError, inst:
745 except OSError, inst:
746 if inst.errno != errno.ENOENT:
746 if inst.errno != errno.ENOENT:
747 raise
747 raise
748 st_mode = createmode
748 st_mode = createmode
749 if st_mode is None:
749 if st_mode is None:
750 st_mode = ~umask
750 st_mode = ~umask
751 st_mode &= 0666
751 st_mode &= 0666
752 os.chmod(temp, st_mode)
752 os.chmod(temp, st_mode)
753 if emptyok:
753 if emptyok:
754 return temp
754 return temp
755 try:
755 try:
756 try:
756 try:
757 ifp = posixfile(name, "rb")
757 ifp = posixfile(name, "rb")
758 except IOError, inst:
758 except IOError, inst:
759 if inst.errno == errno.ENOENT:
759 if inst.errno == errno.ENOENT:
760 return temp
760 return temp
761 if not getattr(inst, 'filename', None):
761 if not getattr(inst, 'filename', None):
762 inst.filename = name
762 inst.filename = name
763 raise
763 raise
764 ofp = posixfile(temp, "wb")
764 ofp = posixfile(temp, "wb")
765 for chunk in filechunkiter(ifp):
765 for chunk in filechunkiter(ifp):
766 ofp.write(chunk)
766 ofp.write(chunk)
767 ifp.close()
767 ifp.close()
768 ofp.close()
768 ofp.close()
769 except:
769 except:
770 try: os.unlink(temp)
770 try: os.unlink(temp)
771 except: pass
771 except: pass
772 raise
772 raise
773 return temp
773 return temp
774
774
775 class atomictempfile(object):
775 class atomictempfile(object):
776 """file-like object that atomically updates a file
776 """file-like object that atomically updates a file
777
777
778 All writes will be redirected to a temporary copy of the original
778 All writes will be redirected to a temporary copy of the original
779 file. When rename is called, the copy is renamed to the original
779 file. When rename is called, the copy is renamed to the original
780 name, making the changes visible.
780 name, making the changes visible.
781 """
781 """
782 def __init__(self, name, mode, createmode):
782 def __init__(self, name, mode, createmode):
783 self.__name = name
783 self.__name = name
784 self._fp = None
784 self._fp = None
785 self.temp = mktempcopy(name, emptyok=('w' in mode),
785 self.temp = mktempcopy(name, emptyok=('w' in mode),
786 createmode=createmode)
786 createmode=createmode)
787 self._fp = posixfile(self.temp, mode)
787 self._fp = posixfile(self.temp, mode)
788
788
789 def __getattr__(self, name):
789 def __getattr__(self, name):
790 return getattr(self._fp, name)
790 return getattr(self._fp, name)
791
791
792 def rename(self):
792 def rename(self):
793 if not self._fp.closed:
793 if not self._fp.closed:
794 self._fp.close()
794 self._fp.close()
795 rename(self.temp, localpath(self.__name))
795 rename(self.temp, localpath(self.__name))
796
796
797 def __del__(self):
797 def __del__(self):
798 if not self._fp:
798 if not self._fp:
799 return
799 return
800 if not self._fp.closed:
800 if not self._fp.closed:
801 try:
801 try:
802 os.unlink(self.temp)
802 os.unlink(self.temp)
803 except: pass
803 except: pass
804 self._fp.close()
804 self._fp.close()
805
805
806 def makedirs(name, mode=None):
806 def makedirs(name, mode=None):
807 """recursive directory creation with parent mode inheritance"""
807 """recursive directory creation with parent mode inheritance"""
808 try:
808 try:
809 os.mkdir(name)
809 os.mkdir(name)
810 if mode is not None:
810 if mode is not None:
811 os.chmod(name, mode)
811 os.chmod(name, mode)
812 return
812 return
813 except OSError, err:
813 except OSError, err:
814 if err.errno == errno.EEXIST:
814 if err.errno == errno.EEXIST:
815 return
815 return
816 if err.errno != errno.ENOENT:
816 if err.errno != errno.ENOENT:
817 raise
817 raise
818 parent = os.path.abspath(os.path.dirname(name))
818 parent = os.path.abspath(os.path.dirname(name))
819 makedirs(parent, mode)
819 makedirs(parent, mode)
820 makedirs(name, mode)
820 makedirs(name, mode)
821
821
822 class opener(object):
822 class opener(object):
823 """Open files relative to a base directory
823 """Open files relative to a base directory
824
824
825 This class is used to hide the details of COW semantics and
825 This class is used to hide the details of COW semantics and
826 remote file access from higher level code.
826 remote file access from higher level code.
827 """
827 """
828 def __init__(self, base, audit=True):
828 def __init__(self, base, audit=True):
829 self.base = base
829 self.base = base
830 if audit:
830 if audit:
831 self.audit_path = path_auditor(base)
831 self.audit_path = path_auditor(base)
832 else:
832 else:
833 self.audit_path = always
833 self.audit_path = always
834 self.createmode = None
834 self.createmode = None
835
835
836 @propertycache
836 @propertycache
837 def _can_symlink(self):
837 def _can_symlink(self):
838 return checklink(self.base)
838 return checklink(self.base)
839
839
840 def _fixfilemode(self, name):
840 def _fixfilemode(self, name):
841 if self.createmode is None:
841 if self.createmode is None:
842 return
842 return
843 os.chmod(name, self.createmode & 0666)
843 os.chmod(name, self.createmode & 0666)
844
844
845 def __call__(self, path, mode="r", text=False, atomictemp=False):
845 def __call__(self, path, mode="r", text=False, atomictemp=False):
846 self.audit_path(path)
846 self.audit_path(path)
847 f = os.path.join(self.base, path)
847 f = os.path.join(self.base, path)
848
848
849 if not text and "b" not in mode:
849 if not text and "b" not in mode:
850 mode += "b" # for that other OS
850 mode += "b" # for that other OS
851
851
852 nlink = -1
852 nlink = -1
853 if mode not in ("r", "rb"):
853 if mode not in ("r", "rb"):
854 try:
854 try:
855 nlink = nlinks(f)
855 nlink = nlinks(f)
856 except OSError:
856 except OSError:
857 nlink = 0
857 nlink = 0
858 d = os.path.dirname(f)
858 d = os.path.dirname(f)
859 if not os.path.isdir(d):
859 if not os.path.isdir(d):
860 makedirs(d, self.createmode)
860 makedirs(d, self.createmode)
861 if atomictemp:
861 if atomictemp:
862 return atomictempfile(f, mode, self.createmode)
862 return atomictempfile(f, mode, self.createmode)
863 if nlink > 1:
863 if nlink > 1:
864 rename(mktempcopy(f), f)
864 rename(mktempcopy(f), f)
865 fp = posixfile(f, mode)
865 fp = posixfile(f, mode)
866 if nlink == 0:
866 if nlink == 0:
867 self._fixfilemode(f)
867 self._fixfilemode(f)
868 return fp
868 return fp
869
869
870 def symlink(self, src, dst):
870 def symlink(self, src, dst):
871 self.audit_path(dst)
871 self.audit_path(dst)
872 linkname = os.path.join(self.base, dst)
872 linkname = os.path.join(self.base, dst)
873 try:
873 try:
874 os.unlink(linkname)
874 os.unlink(linkname)
875 except OSError:
875 except OSError:
876 pass
876 pass
877
877
878 dirname = os.path.dirname(linkname)
878 dirname = os.path.dirname(linkname)
879 if not os.path.exists(dirname):
879 if not os.path.exists(dirname):
880 makedirs(dirname, self.createmode)
880 makedirs(dirname, self.createmode)
881
881
882 if self._can_symlink:
882 if self._can_symlink:
883 try:
883 try:
884 os.symlink(src, linkname)
884 os.symlink(src, linkname)
885 except OSError, err:
885 except OSError, err:
886 raise OSError(err.errno, _('could not symlink to %r: %s') %
886 raise OSError(err.errno, _('could not symlink to %r: %s') %
887 (src, err.strerror), linkname)
887 (src, err.strerror), linkname)
888 else:
888 else:
889 f = self(dst, "w")
889 f = self(dst, "w")
890 f.write(src)
890 f.write(src)
891 f.close()
891 f.close()
892 self._fixfilemode(dst)
892 self._fixfilemode(dst)
893
893
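# Example (not part of util.py): a sketch of typical opener usage; the base
# directory and file name are made up. With atomictemp=True, writes go to a
# temporary copy that rename() publishes on success.
from mercurial import util
op = util.opener('/repo/.hg')
fp = op('store/somefile', 'w', atomictemp=True)
fp.write('data\n')
fp.rename()                            # atomictempfile.rename puts it in place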
894 class chunkbuffer(object):
894 class chunkbuffer(object):
895 """Allow arbitrary sized chunks of data to be efficiently read from an
895 """Allow arbitrary sized chunks of data to be efficiently read from an
896 iterator over chunks of arbitrary size."""
896 iterator over chunks of arbitrary size."""
897
897
898 def __init__(self, in_iter):
898 def __init__(self, in_iter):
899 """in_iter is the iterator that's iterating over the input chunks.
899 """in_iter is the iterator that's iterating over the input chunks.
900 targetsize is how big a buffer to try to maintain."""
900 targetsize is how big a buffer to try to maintain."""
901 self.iter = iter(in_iter)
901 self.iter = iter(in_iter)
902 self.buf = ''
902 self.buf = ''
903 self.targetsize = 2**16
903 self.targetsize = 2**16
904
904
905 def read(self, l):
905 def read(self, l):
906 """Read L bytes of data from the iterator of chunks of data.
906 """Read L bytes of data from the iterator of chunks of data.
907 Returns less than L bytes if the iterator runs dry."""
907 Returns less than L bytes if the iterator runs dry."""
908 if l > len(self.buf) and self.iter:
908 if l > len(self.buf) and self.iter:
909 # Clamp to a multiple of self.targetsize
909 # Clamp to a multiple of self.targetsize
910 targetsize = max(l, self.targetsize)
910 targetsize = max(l, self.targetsize)
911 collector = cStringIO.StringIO()
911 collector = cStringIO.StringIO()
912 collector.write(self.buf)
912 collector.write(self.buf)
913 collected = len(self.buf)
913 collected = len(self.buf)
914 for chunk in self.iter:
914 for chunk in self.iter:
915 collector.write(chunk)
915 collector.write(chunk)
916 collected += len(chunk)
916 collected += len(chunk)
917 if collected >= targetsize:
917 if collected >= targetsize:
918 break
918 break
919 if collected < targetsize:
919 if collected < targetsize:
920 self.iter = False
920 self.iter = False
921 self.buf = collector.getvalue()
921 self.buf = collector.getvalue()
922 if len(self.buf) == l:
922 if len(self.buf) == l:
923 s, self.buf = str(self.buf), ''
923 s, self.buf = str(self.buf), ''
924 else:
924 else:
925 s, self.buf = self.buf[:l], buffer(self.buf, l)
925 s, self.buf = self.buf[:l], buffer(self.buf, l)
926 return s
926 return s
927
927
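# Example (not part of util.py): a sketch of reading fixed-size records from
# arbitrarily sized input chunks.
from mercurial import util
cb = util.chunkbuffer(iter(['abc', 'defgh', 'ij']))
cb.read(4)                             # 'abcd'
cb.read(4)                             # 'efgh'
cb.read(4)                             # 'ij' -- shorter because the input ran dry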
928 def filechunkiter(f, size=65536, limit=None):
928 def filechunkiter(f, size=65536, limit=None):
929 """Create a generator that produces the data in the file size
929 """Create a generator that produces the data in the file size
930 (default 65536) bytes at a time, up to optional limit (default is
930 (default 65536) bytes at a time, up to optional limit (default is
931 to read all data). Chunks may be less than size bytes if the
931 to read all data). Chunks may be less than size bytes if the
932 chunk is the last chunk in the file, or the file is a socket or
932 chunk is the last chunk in the file, or the file is a socket or
933 some other type of file that sometimes reads less data than is
933 some other type of file that sometimes reads less data than is
934 requested."""
934 requested."""
935 assert size >= 0
935 assert size >= 0
936 assert limit is None or limit >= 0
936 assert limit is None or limit >= 0
937 while True:
937 while True:
938 if limit is None: nbytes = size
938 if limit is None: nbytes = size
939 else: nbytes = min(limit, size)
939 else: nbytes = min(limit, size)
940 s = nbytes and f.read(nbytes)
940 s = nbytes and f.read(nbytes)
941 if not s: break
941 if not s: break
942 if limit: limit -= len(s)
942 if limit: limit -= len(s)
943 yield s
943 yield s
944
944
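# Example (not part of util.py): a sketch that streams a file in bounded
# chunks instead of reading it all at once; the file name is made up.
from mercurial import util
fp = open('some-large-file', 'rb')
total = 0
for chunk in util.filechunkiter(fp, size=8192):
    total += len(chunk)
fp.close()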
945 def makedate():
945 def makedate():
946 lt = time.localtime()
946 lt = time.localtime()
947 if lt[8] == 1 and time.daylight:
947 if lt[8] == 1 and time.daylight:
948 tz = time.altzone
948 tz = time.altzone
949 else:
949 else:
950 tz = time.timezone
950 tz = time.timezone
951 return time.mktime(lt), tz
951 return time.mktime(lt), tz
952
952
953 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
953 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
954 """represent a (unixtime, offset) tuple as a localized time.
954 """represent a (unixtime, offset) tuple as a localized time.
955 unixtime is seconds since the epoch, and offset is the time zone's
955 unixtime is seconds since the epoch, and offset is the time zone's
956 number of seconds away from UTC. if timezone is false, do not
956 number of seconds away from UTC. if timezone is false, do not
957 append time zone to string."""
957 append time zone to string."""
958 t, tz = date or makedate()
958 t, tz = date or makedate()
959 if "%1" in format or "%2" in format:
959 if "%1" in format or "%2" in format:
960 sign = (tz > 0) and "-" or "+"
960 sign = (tz > 0) and "-" or "+"
961 minutes = abs(tz) // 60
961 minutes = abs(tz) // 60
962 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
962 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
963 format = format.replace("%2", "%02d" % (minutes % 60))
963 format = format.replace("%2", "%02d" % (minutes % 60))
964 s = time.strftime(format, time.gmtime(float(t) - tz))
964 s = time.strftime(format, time.gmtime(float(t) - tz))
965 return s
965 return s
966
966
967 def shortdate(date=None):
967 def shortdate(date=None):
968 """turn (timestamp, tzoff) tuple into an ISO 8601 date."""
968 """turn (timestamp, tzoff) tuple into an ISO 8601 date."""
969 return datestr(date, format='%Y-%m-%d')
969 return datestr(date, format='%Y-%m-%d')
970
970
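# Example (not part of util.py): a sketch of the %1/%2 offset placeholders,
# using a fixed (unixtime, offset) pair.
from mercurial import util
when = (1234567890, 0)                        # 2009-02-13 23:31:30 UTC
util.datestr(when)                            # 'Fri Feb 13 23:31:30 2009 +0000'
util.datestr(when, format='%Y-%m-%d %1%2')    # '2009-02-13 +0000'
util.shortdate(when)                          # '2009-02-13'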
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

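# Illustrative sketch (not part of the original module): parsedate accepts a
# raw "unixtime offset" string, a ready-made (unixtime, offset) tuple, or any
# of the formats in defaultdateformats; the three calls below should all
# come out as (1230768000, 0).
def _parsedate_example():
    a = parsedate("1230768000 0")            # explicit unixtime and offset
    b = parsedate((1230768000, 0))           # tuple is passed through as-is
    c = parsedate("2009-01-01 00:00 +0000")  # matched against the defaults
    return a, b, c
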
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

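# Illustrative sketch (not part of the original module): matchdate compiles a
# date specifier into a predicate over unix timestamps (this is what backs
# options such as 'hg log --date'). The specifiers below are example values;
# the resulting cutoffs depend on the local time zone.
def _matchdate_example(when):
    since2009 = matchdate(">2009-01-01")
    during2008 = matchdate("2008-01-01 to 2008-12-31")
    return since2009(when), during2008(when)
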
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

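# Illustrative sketch (not part of the original module): shortuser and email
# pull a short alias and the bare address out of a conventional
# "Name <address>" author string; the author below is made up.
def _author_example():
    author = "Jane Doe <jane.doe@example.com>"
    return shortuser(author), email(author)  # ('jane', 'jane.doe@example.com')
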
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

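# Illustrative sketch (not part of the original module): gather every
# repository below a hypothetical base directory, following symlinks and
# descending into nested working copies.
def _walkrepos_example(base):
    return list(walkrepos(base, followsym=True, recurse=True))
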
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

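# Illustrative sketch (not part of the original module): pointing HGRCPATH at
# a directory makes rcpath() return the *.rc files inside it instead of the
# os-specific defaults. The directory name is hypothetical; the cached value
# is reset so the override takes effect.
def _rcpath_example(confdir):
    global _rcpath
    _rcpath = None
    os.environ['HGRCPATH'] = confdir
    return rcpath()
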
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

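# Illustrative sketch (not part of the original module): bytecount scales to
# the largest fitting unit and varies the precision with magnitude, so these
# should come out roughly as '0 bytes', '10.0 KB' and '1.50 GB'.
def _bytecount_example():
    return bytecount(0), bytecount(10240), bytecount(3 * (1 << 29))
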
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                try:
                    fd = dev.fileno()
                except AttributeError:
                    continue
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def wrap(line, hangindent, width=None):
    if width is None:
        width = termwidth() - 2
    if width <= hangindent:
        # adjust for weird terminal size
        width = max(78, hangindent + 1)
    padding = '\n' + ' ' * hangindent
    # To avoid corrupting multi-byte characters in line, we must wrap
    # a Unicode string instead of a bytestring.
    try:
        u = line.decode(encoding.encoding)
        w = padding.join(textwrap.wrap(u, width=width - hangindent))
        return w.encode(encoding.encoding)
    except UnicodeDecodeError:
        return padding.join(textwrap.wrap(line, width=width - hangindent))

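# Illustrative sketch (not part of the original module): wrap a long
# description to a 30-column width with an 8-column hanging indent, roughly
# the way help output formats option descriptions. The text is made up.
def _wrap_example():
    text = "a rather long option description that will not fit on one line"
    return wrap(text, 8, width=30)
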
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line
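
# Illustrative sketch (not part of the original module): iterlines flattens an
# iterable of multi-line string chunks into individual lines, without the
# trailing newlines; the chunks below are made-up sample data.
def _iterlines_example():
    chunks = ["first\nsecond\n", "third\n"]
    return list(iterlines(chunks))  # ['first', 'second', 'third']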