util: canonpath: simplify logic...
Nicolas Dumazet
r9386:eae98607 default
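The commit drops the "root == os.sep" special case from the rootsep computation at the top of canonpath(). A small self-contained sketch of why that branch was redundant; endswithsep() is copied from the same util.py, while rootsep_old/rootsep_new are illustrative wrappers added here, not names from the commit:

import os

def endswithsep(path):
    # same check as util.endswithsep in this file
    return path.endswith(os.sep) or (os.altsep and path.endswith(os.altsep))

def rootsep_old(root):
    # prologue before this commit
    if root == os.sep:
        return os.sep
    elif endswithsep(root):
        return root
    return root + os.sep

def rootsep_new(root):
    # prologue after this commit
    if endswithsep(root):
        return root
    return root + os.sep

# The removed branch is redundant: endswithsep(os.sep) is True, so the
# simplified code already returns root, which equals os.sep in that case.
for root in (os.sep, '/repo', '/repo' + os.sep):
    assert rootsep_old(root) == rootsep_new(root)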
@@ -1,1284 +1,1282 @@
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, random, textwrap
19 import os, stat, time, calendar, random, textwrap
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd):
41 def popen2(cmd):
42 # Setting bufsize to -1 lets the system decide the buffer size.
42 # Setting bufsize to -1 lets the system decide the buffer size.
43 # The default for bufsize is 0, meaning unbuffered. This leads to
43 # The default for bufsize is 0, meaning unbuffered. This leads to
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
46 close_fds=closefds,
46 close_fds=closefds,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
48 return p.stdin, p.stdout
48 return p.stdin, p.stdout
49 def popen3(cmd):
49 def popen3(cmd):
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
51 close_fds=closefds,
51 close_fds=closefds,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
53 stderr=subprocess.PIPE)
53 stderr=subprocess.PIPE)
54 return p.stdin, p.stdout, p.stderr
54 return p.stdin, p.stdout, p.stderr
55
55
56 def version():
56 def version():
57 """Return version information if available."""
57 """Return version information if available."""
58 try:
58 try:
59 import __version__
59 import __version__
60 return __version__.version
60 return __version__.version
61 except ImportError:
61 except ImportError:
62 return 'unknown'
62 return 'unknown'
63
63
64 # used by parsedate
64 # used by parsedate
65 defaultdateformats = (
65 defaultdateformats = (
66 '%Y-%m-%d %H:%M:%S',
66 '%Y-%m-%d %H:%M:%S',
67 '%Y-%m-%d %I:%M:%S%p',
67 '%Y-%m-%d %I:%M:%S%p',
68 '%Y-%m-%d %H:%M',
68 '%Y-%m-%d %H:%M',
69 '%Y-%m-%d %I:%M%p',
69 '%Y-%m-%d %I:%M%p',
70 '%Y-%m-%d',
70 '%Y-%m-%d',
71 '%m-%d',
71 '%m-%d',
72 '%m/%d',
72 '%m/%d',
73 '%m/%d/%y',
73 '%m/%d/%y',
74 '%m/%d/%Y',
74 '%m/%d/%Y',
75 '%a %b %d %H:%M:%S %Y',
75 '%a %b %d %H:%M:%S %Y',
76 '%a %b %d %I:%M:%S%p %Y',
76 '%a %b %d %I:%M:%S%p %Y',
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
78 '%b %d %H:%M:%S %Y',
78 '%b %d %H:%M:%S %Y',
79 '%b %d %I:%M:%S%p %Y',
79 '%b %d %I:%M:%S%p %Y',
80 '%b %d %H:%M:%S',
80 '%b %d %H:%M:%S',
81 '%b %d %I:%M:%S%p',
81 '%b %d %I:%M:%S%p',
82 '%b %d %H:%M',
82 '%b %d %H:%M',
83 '%b %d %I:%M%p',
83 '%b %d %I:%M%p',
84 '%b %d %Y',
84 '%b %d %Y',
85 '%b %d',
85 '%b %d',
86 '%H:%M:%S',
86 '%H:%M:%S',
87 '%I:%M:%S%p',
87 '%I:%M:%S%p',
88 '%H:%M',
88 '%H:%M',
89 '%I:%M%p',
89 '%I:%M%p',
90 )
90 )
91
91
92 extendeddateformats = defaultdateformats + (
92 extendeddateformats = defaultdateformats + (
93 "%Y",
93 "%Y",
94 "%Y-%m",
94 "%Y-%m",
95 "%b",
95 "%b",
96 "%b %Y",
96 "%b %Y",
97 )
97 )
98
98
99 def cachefunc(func):
99 def cachefunc(func):
100 '''cache the result of function calls'''
100 '''cache the result of function calls'''
101 # XXX doesn't handle keywords args
101 # XXX doesn't handle keywords args
102 cache = {}
102 cache = {}
103 if func.func_code.co_argcount == 1:
103 if func.func_code.co_argcount == 1:
104 # we gain a small amount of time because
104 # we gain a small amount of time because
105 # we don't need to pack/unpack the list
105 # we don't need to pack/unpack the list
106 def f(arg):
106 def f(arg):
107 if arg not in cache:
107 if arg not in cache:
108 cache[arg] = func(arg)
108 cache[arg] = func(arg)
109 return cache[arg]
109 return cache[arg]
110 else:
110 else:
111 def f(*args):
111 def f(*args):
112 if args not in cache:
112 if args not in cache:
113 cache[args] = func(*args)
113 cache[args] = func(*args)
114 return cache[args]
114 return cache[args]
115
115
116 return f
116 return f
117
117
118 def lrucachefunc(func):
118 def lrucachefunc(func):
119 '''cache most recent results of function calls'''
119 '''cache most recent results of function calls'''
120 cache = {}
120 cache = {}
121 order = []
121 order = []
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 def f(arg):
123 def f(arg):
124 if arg not in cache:
124 if arg not in cache:
125 if len(cache) > 20:
125 if len(cache) > 20:
126 del cache[order.pop(0)]
126 del cache[order.pop(0)]
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 else:
128 else:
129 order.remove(arg)
129 order.remove(arg)
130 order.append(arg)
130 order.append(arg)
131 return cache[arg]
131 return cache[arg]
132 else:
132 else:
133 def f(*args):
133 def f(*args):
134 if args not in cache:
134 if args not in cache:
135 if len(cache) > 20:
135 if len(cache) > 20:
136 del cache[order.pop(0)]
136 del cache[order.pop(0)]
137 cache[args] = func(*args)
137 cache[args] = func(*args)
138 else:
138 else:
139 order.remove(args)
139 order.remove(args)
140 order.append(args)
140 order.append(args)
141 return cache[args]
141 return cache[args]
142
142
143 return f
143 return f
144
144
145 class propertycache(object):
145 class propertycache(object):
146 def __init__(self, func):
146 def __init__(self, func):
147 self.func = func
147 self.func = func
148 self.name = func.__name__
148 self.name = func.__name__
149 def __get__(self, obj, type=None):
149 def __get__(self, obj, type=None):
150 result = self.func(obj)
150 result = self.func(obj)
151 setattr(obj, self.name, result)
151 setattr(obj, self.name, result)
152 return result
152 return result
153
153
154 def pipefilter(s, cmd):
154 def pipefilter(s, cmd):
155 '''filter string S through command CMD, returning its output'''
155 '''filter string S through command CMD, returning its output'''
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
158 pout, perr = p.communicate(s)
158 pout, perr = p.communicate(s)
159 return pout
159 return pout
160
160
161 def tempfilter(s, cmd):
161 def tempfilter(s, cmd):
162 '''filter string S through a pair of temporary files with CMD.
162 '''filter string S through a pair of temporary files with CMD.
163 CMD is used as a template to create the real command to be run,
163 CMD is used as a template to create the real command to be run,
164 with the strings INFILE and OUTFILE replaced by the real names of
164 with the strings INFILE and OUTFILE replaced by the real names of
165 the temporary files generated.'''
165 the temporary files generated.'''
166 inname, outname = None, None
166 inname, outname = None, None
167 try:
167 try:
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
169 fp = os.fdopen(infd, 'wb')
169 fp = os.fdopen(infd, 'wb')
170 fp.write(s)
170 fp.write(s)
171 fp.close()
171 fp.close()
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
173 os.close(outfd)
173 os.close(outfd)
174 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('INFILE', inname)
175 cmd = cmd.replace('OUTFILE', outname)
175 cmd = cmd.replace('OUTFILE', outname)
176 code = os.system(cmd)
176 code = os.system(cmd)
177 if sys.platform == 'OpenVMS' and code & 1:
177 if sys.platform == 'OpenVMS' and code & 1:
178 code = 0
178 code = 0
179 if code: raise Abort(_("command '%s' failed: %s") %
179 if code: raise Abort(_("command '%s' failed: %s") %
180 (cmd, explain_exit(code)))
180 (cmd, explain_exit(code)))
181 return open(outname, 'rb').read()
181 return open(outname, 'rb').read()
182 finally:
182 finally:
183 try:
183 try:
184 if inname: os.unlink(inname)
184 if inname: os.unlink(inname)
185 except: pass
185 except: pass
186 try:
186 try:
187 if outname: os.unlink(outname)
187 if outname: os.unlink(outname)
188 except: pass
188 except: pass
189
189
190 filtertable = {
190 filtertable = {
191 'tempfile:': tempfilter,
191 'tempfile:': tempfilter,
192 'pipe:': pipefilter,
192 'pipe:': pipefilter,
193 }
193 }
194
194
195 def filter(s, cmd):
195 def filter(s, cmd):
196 "filter a string through a command that transforms its input to its output"
196 "filter a string through a command that transforms its input to its output"
197 for name, fn in filtertable.iteritems():
197 for name, fn in filtertable.iteritems():
198 if cmd.startswith(name):
198 if cmd.startswith(name):
199 return fn(s, cmd[len(name):].lstrip())
199 return fn(s, cmd[len(name):].lstrip())
200 return pipefilter(s, cmd)
200 return pipefilter(s, cmd)
201
201
202 def binary(s):
202 def binary(s):
203 """return true if a string is binary data"""
203 """return true if a string is binary data"""
204 return bool(s and '\0' in s)
204 return bool(s and '\0' in s)
205
205
206 def increasingchunks(source, min=1024, max=65536):
206 def increasingchunks(source, min=1024, max=65536):
207 '''return no less than min bytes per chunk while data remains,
207 '''return no less than min bytes per chunk while data remains,
208 doubling min after each chunk until it reaches max'''
208 doubling min after each chunk until it reaches max'''
209 def log2(x):
209 def log2(x):
210 if not x:
210 if not x:
211 return 0
211 return 0
212 i = 0
212 i = 0
213 while x:
213 while x:
214 x >>= 1
214 x >>= 1
215 i += 1
215 i += 1
216 return i - 1
216 return i - 1
217
217
218 buf = []
218 buf = []
219 blen = 0
219 blen = 0
220 for chunk in source:
220 for chunk in source:
221 buf.append(chunk)
221 buf.append(chunk)
222 blen += len(chunk)
222 blen += len(chunk)
223 if blen >= min:
223 if blen >= min:
224 if min < max:
224 if min < max:
225 min = min << 1
225 min = min << 1
226 nmin = 1 << log2(blen)
226 nmin = 1 << log2(blen)
227 if nmin > min:
227 if nmin > min:
228 min = nmin
228 min = nmin
229 if min > max:
229 if min > max:
230 min = max
230 min = max
231 yield ''.join(buf)
231 yield ''.join(buf)
232 blen = 0
232 blen = 0
233 buf = []
233 buf = []
234 if buf:
234 if buf:
235 yield ''.join(buf)
235 yield ''.join(buf)
236
236
237 Abort = error.Abort
237 Abort = error.Abort
238
238
239 def always(fn): return True
239 def always(fn): return True
240 def never(fn): return False
240 def never(fn): return False
241
241
242 def pathto(root, n1, n2):
242 def pathto(root, n1, n2):
243 '''return the relative path from one place to another.
243 '''return the relative path from one place to another.
244 root should use os.sep to separate directories
244 root should use os.sep to separate directories
245 n1 should use os.sep to separate directories
245 n1 should use os.sep to separate directories
246 n2 should use "/" to separate directories
246 n2 should use "/" to separate directories
247 returns an os.sep-separated path.
247 returns an os.sep-separated path.
248
248
249 If n1 is a relative path, it's assumed it's
249 If n1 is a relative path, it's assumed it's
250 relative to root.
250 relative to root.
251 n2 should always be relative to root.
251 n2 should always be relative to root.
252 '''
252 '''
253 if not n1: return localpath(n2)
253 if not n1: return localpath(n2)
254 if os.path.isabs(n1):
254 if os.path.isabs(n1):
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
256 return os.path.join(root, localpath(n2))
256 return os.path.join(root, localpath(n2))
257 n2 = '/'.join((pconvert(root), n2))
257 n2 = '/'.join((pconvert(root), n2))
258 a, b = splitpath(n1), n2.split('/')
258 a, b = splitpath(n1), n2.split('/')
259 a.reverse()
259 a.reverse()
260 b.reverse()
260 b.reverse()
261 while a and b and a[-1] == b[-1]:
261 while a and b and a[-1] == b[-1]:
262 a.pop()
262 a.pop()
263 b.pop()
263 b.pop()
264 b.reverse()
264 b.reverse()
265 return os.sep.join((['..'] * len(a)) + b) or '.'
265 return os.sep.join((['..'] * len(a)) + b) or '.'
266
266
267 def canonpath(root, cwd, myname):
267 def canonpath(root, cwd, myname):
268 """return the canonical path of myname, given cwd and root"""
268 """return the canonical path of myname, given cwd and root"""
269 - if root == os.sep:
270 - rootsep = os.sep
271 - elif endswithsep(root):
272 - rootsep = root
269 + if endswithsep(root):
270 + rootsep = root
273 else:
271 else:
274 rootsep = root + os.sep
272 rootsep = root + os.sep
275 name = myname
273 name = myname
276 if not os.path.isabs(name):
274 if not os.path.isabs(name):
277 name = os.path.join(root, cwd, name)
275 name = os.path.join(root, cwd, name)
278 name = os.path.normpath(name)
276 name = os.path.normpath(name)
279 audit_path = path_auditor(root)
277 audit_path = path_auditor(root)
280 if name != rootsep and name.startswith(rootsep):
278 if name != rootsep and name.startswith(rootsep):
281 name = name[len(rootsep):]
279 name = name[len(rootsep):]
282 audit_path(name)
280 audit_path(name)
283 return pconvert(name)
281 return pconvert(name)
284 elif name == root:
282 elif name == root:
285 return ''
283 return ''
286 else:
284 else:
287 # Determine whether `name' is in the hierarchy at or beneath `root',
285 # Determine whether `name' is in the hierarchy at or beneath `root',
288 # by iterating name=dirname(name) until that causes no change (can't
286 # by iterating name=dirname(name) until that causes no change (can't
289 # check name == '/', because that doesn't work on windows). For each
287 # check name == '/', because that doesn't work on windows). For each
290 # `name', compare dev/inode numbers. If they match, the list `rel'
288 # `name', compare dev/inode numbers. If they match, the list `rel'
291 # holds the reversed list of components making up the relative file
289 # holds the reversed list of components making up the relative file
292 # name we want.
290 # name we want.
293 root_st = os.stat(root)
291 root_st = os.stat(root)
294 rel = []
292 rel = []
295 while True:
293 while True:
296 try:
294 try:
297 name_st = os.stat(name)
295 name_st = os.stat(name)
298 except OSError:
296 except OSError:
299 break
297 break
300 if samestat(name_st, root_st):
298 if samestat(name_st, root_st):
301 if not rel:
299 if not rel:
302 # name was actually the same as root (maybe a symlink)
300 # name was actually the same as root (maybe a symlink)
303 return ''
301 return ''
304 rel.reverse()
302 rel.reverse()
305 name = os.path.join(*rel)
303 name = os.path.join(*rel)
306 audit_path(name)
304 audit_path(name)
307 return pconvert(name)
305 return pconvert(name)
308 dirname, basename = os.path.split(name)
306 dirname, basename = os.path.split(name)
309 rel.append(basename)
307 rel.append(basename)
310 if dirname == name:
308 if dirname == name:
311 break
309 break
312 name = dirname
310 name = dirname
313
311
314 raise Abort('%s not under root' % myname)
312 raise Abort('%s not under root' % myname)
315
313
316 _hgexecutable = None
314 _hgexecutable = None
317
315
318 def main_is_frozen():
316 def main_is_frozen():
319 """return True if we are a frozen executable.
317 """return True if we are a frozen executable.
320
318
321 The code supports py2exe (most common, Windows only) and tools/freeze
319 The code supports py2exe (most common, Windows only) and tools/freeze
322 (portable, not much used).
320 (portable, not much used).
323 """
321 """
324 return (hasattr(sys, "frozen") or # new py2exe
322 return (hasattr(sys, "frozen") or # new py2exe
325 hasattr(sys, "importers") or # old py2exe
323 hasattr(sys, "importers") or # old py2exe
326 imp.is_frozen("__main__")) # tools/freeze
324 imp.is_frozen("__main__")) # tools/freeze
327
325
328 def hgexecutable():
326 def hgexecutable():
329 """return location of the 'hg' executable.
327 """return location of the 'hg' executable.
330
328
331 Defaults to $HG or 'hg' in the search path.
329 Defaults to $HG or 'hg' in the search path.
332 """
330 """
333 if _hgexecutable is None:
331 if _hgexecutable is None:
334 hg = os.environ.get('HG')
332 hg = os.environ.get('HG')
335 if hg:
333 if hg:
336 set_hgexecutable(hg)
334 set_hgexecutable(hg)
337 elif main_is_frozen():
335 elif main_is_frozen():
338 set_hgexecutable(sys.executable)
336 set_hgexecutable(sys.executable)
339 else:
337 else:
340 set_hgexecutable(find_exe('hg') or 'hg')
338 set_hgexecutable(find_exe('hg') or 'hg')
341 return _hgexecutable
339 return _hgexecutable
342
340
343 def set_hgexecutable(path):
341 def set_hgexecutable(path):
344 """set location of the 'hg' executable"""
342 """set location of the 'hg' executable"""
345 global _hgexecutable
343 global _hgexecutable
346 _hgexecutable = path
344 _hgexecutable = path
347
345
348 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
346 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
349 '''enhanced shell command execution.
347 '''enhanced shell command execution.
350 run with environment maybe modified, maybe in different dir.
348 run with environment maybe modified, maybe in different dir.
351
349
352 if command fails and onerr is None, return status. if ui object,
350 if command fails and onerr is None, return status. if ui object,
353 print error message and return status, else raise onerr object as
351 print error message and return status, else raise onerr object as
354 exception.'''
352 exception.'''
355 def py2shell(val):
353 def py2shell(val):
356 'convert python object into string that is useful to shell'
354 'convert python object into string that is useful to shell'
357 if val is None or val is False:
355 if val is None or val is False:
358 return '0'
356 return '0'
359 if val is True:
357 if val is True:
360 return '1'
358 return '1'
361 return str(val)
359 return str(val)
362 oldenv = {}
360 oldenv = {}
363 for k in environ:
361 for k in environ:
364 oldenv[k] = os.environ.get(k)
362 oldenv[k] = os.environ.get(k)
365 if cwd is not None:
363 if cwd is not None:
366 oldcwd = os.getcwd()
364 oldcwd = os.getcwd()
367 origcmd = cmd
365 origcmd = cmd
368 if os.name == 'nt':
366 if os.name == 'nt':
369 cmd = '"%s"' % cmd
367 cmd = '"%s"' % cmd
370 try:
368 try:
371 for k, v in environ.iteritems():
369 for k, v in environ.iteritems():
372 os.environ[k] = py2shell(v)
370 os.environ[k] = py2shell(v)
373 os.environ['HG'] = hgexecutable()
371 os.environ['HG'] = hgexecutable()
374 if cwd is not None and oldcwd != cwd:
372 if cwd is not None and oldcwd != cwd:
375 os.chdir(cwd)
373 os.chdir(cwd)
376 rc = os.system(cmd)
374 rc = os.system(cmd)
377 if sys.platform == 'OpenVMS' and rc & 1:
375 if sys.platform == 'OpenVMS' and rc & 1:
378 rc = 0
376 rc = 0
379 if rc and onerr:
377 if rc and onerr:
380 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
378 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
381 explain_exit(rc)[0])
379 explain_exit(rc)[0])
382 if errprefix:
380 if errprefix:
383 errmsg = '%s: %s' % (errprefix, errmsg)
381 errmsg = '%s: %s' % (errprefix, errmsg)
384 try:
382 try:
385 onerr.warn(errmsg + '\n')
383 onerr.warn(errmsg + '\n')
386 except AttributeError:
384 except AttributeError:
387 raise onerr(errmsg)
385 raise onerr(errmsg)
388 return rc
386 return rc
389 finally:
387 finally:
390 for k, v in oldenv.iteritems():
388 for k, v in oldenv.iteritems():
391 if v is None:
389 if v is None:
392 del os.environ[k]
390 del os.environ[k]
393 else:
391 else:
394 os.environ[k] = v
392 os.environ[k] = v
395 if cwd is not None and oldcwd != cwd:
393 if cwd is not None and oldcwd != cwd:
396 os.chdir(oldcwd)
394 os.chdir(oldcwd)
397
395
398 def checksignature(func):
396 def checksignature(func):
399 '''wrap a function with code to check for calling errors'''
397 '''wrap a function with code to check for calling errors'''
400 def check(*args, **kwargs):
398 def check(*args, **kwargs):
401 try:
399 try:
402 return func(*args, **kwargs)
400 return func(*args, **kwargs)
403 except TypeError:
401 except TypeError:
404 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
402 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
405 raise error.SignatureError
403 raise error.SignatureError
406 raise
404 raise
407
405
408 return check
406 return check
409
407
410 # os.path.lexists is not available on python2.3
408 # os.path.lexists is not available on python2.3
411 def lexists(filename):
409 def lexists(filename):
412 "test whether a file with this name exists. does not follow symlinks"
410 "test whether a file with this name exists. does not follow symlinks"
413 try:
411 try:
414 os.lstat(filename)
412 os.lstat(filename)
415 except:
413 except:
416 return False
414 return False
417 return True
415 return True
418
416
419 def rename(src, dst):
417 def rename(src, dst):
420 """forcibly rename a file"""
418 """forcibly rename a file"""
421 try:
419 try:
422 os.rename(src, dst)
420 os.rename(src, dst)
423 except OSError, err: # FIXME: check err (EEXIST ?)
421 except OSError, err: # FIXME: check err (EEXIST ?)
424
422
425 # On windows, rename to existing file is not allowed, so we
423 # On windows, rename to existing file is not allowed, so we
426 # must delete destination first. But if a file is open, unlink
424 # must delete destination first. But if a file is open, unlink
427 # schedules it for delete but does not delete it. Rename
425 # schedules it for delete but does not delete it. Rename
428 # happens immediately even for open files, so we rename
426 # happens immediately even for open files, so we rename
429 # destination to a temporary name, then delete that. Then
427 # destination to a temporary name, then delete that. Then
430 # rename is safe to do.
428 # rename is safe to do.
431 # The temporary name is chosen at random to avoid the situation
429 # The temporary name is chosen at random to avoid the situation
432 # where a file is left lying around from a previous aborted run.
430 # where a file is left lying around from a previous aborted run.
433 # The usual race condition this introduces can't be avoided as
431 # The usual race condition this introduces can't be avoided as
434 # we need the name to rename into, and not the file itself. Due
432 # we need the name to rename into, and not the file itself. Due
435 # to the nature of the operation however, any races will at worst
433 # to the nature of the operation however, any races will at worst
436 # lead to the rename failing and the current operation aborting.
434 # lead to the rename failing and the current operation aborting.
437
435
438 def tempname(prefix):
436 def tempname(prefix):
439 for tries in xrange(10):
437 for tries in xrange(10):
440 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
438 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
441 if not os.path.exists(temp):
439 if not os.path.exists(temp):
442 return temp
440 return temp
443 raise IOError, (errno.EEXIST, "No usable temporary filename found")
441 raise IOError, (errno.EEXIST, "No usable temporary filename found")
444
442
445 temp = tempname(dst)
443 temp = tempname(dst)
446 os.rename(dst, temp)
444 os.rename(dst, temp)
447 os.unlink(temp)
445 os.unlink(temp)
448 os.rename(src, dst)
446 os.rename(src, dst)
449
447
450 def unlink(f):
448 def unlink(f):
451 """unlink and remove the directory if it is empty"""
449 """unlink and remove the directory if it is empty"""
452 os.unlink(f)
450 os.unlink(f)
453 # try removing directories that might now be empty
451 # try removing directories that might now be empty
454 try:
452 try:
455 os.removedirs(os.path.dirname(f))
453 os.removedirs(os.path.dirname(f))
456 except OSError:
454 except OSError:
457 pass
455 pass
458
456
459 def copyfile(src, dest):
457 def copyfile(src, dest):
460 "copy a file, preserving mode and atime/mtime"
458 "copy a file, preserving mode and atime/mtime"
461 if os.path.islink(src):
459 if os.path.islink(src):
462 try:
460 try:
463 os.unlink(dest)
461 os.unlink(dest)
464 except:
462 except:
465 pass
463 pass
466 os.symlink(os.readlink(src), dest)
464 os.symlink(os.readlink(src), dest)
467 else:
465 else:
468 try:
466 try:
469 shutil.copyfile(src, dest)
467 shutil.copyfile(src, dest)
470 shutil.copystat(src, dest)
468 shutil.copystat(src, dest)
471 except shutil.Error, inst:
469 except shutil.Error, inst:
472 raise Abort(str(inst))
470 raise Abort(str(inst))
473
471
474 def copyfiles(src, dst, hardlink=None):
472 def copyfiles(src, dst, hardlink=None):
475 """Copy a directory tree using hardlinks if possible"""
473 """Copy a directory tree using hardlinks if possible"""
476
474
477 if hardlink is None:
475 if hardlink is None:
478 hardlink = (os.stat(src).st_dev ==
476 hardlink = (os.stat(src).st_dev ==
479 os.stat(os.path.dirname(dst)).st_dev)
477 os.stat(os.path.dirname(dst)).st_dev)
480
478
481 if os.path.isdir(src):
479 if os.path.isdir(src):
482 os.mkdir(dst)
480 os.mkdir(dst)
483 for name, kind in osutil.listdir(src):
481 for name, kind in osutil.listdir(src):
484 srcname = os.path.join(src, name)
482 srcname = os.path.join(src, name)
485 dstname = os.path.join(dst, name)
483 dstname = os.path.join(dst, name)
486 copyfiles(srcname, dstname, hardlink)
484 copyfiles(srcname, dstname, hardlink)
487 else:
485 else:
488 if hardlink:
486 if hardlink:
489 try:
487 try:
490 os_link(src, dst)
488 os_link(src, dst)
491 except (IOError, OSError):
489 except (IOError, OSError):
492 hardlink = False
490 hardlink = False
493 shutil.copy(src, dst)
491 shutil.copy(src, dst)
494 else:
492 else:
495 shutil.copy(src, dst)
493 shutil.copy(src, dst)
496
494
497 class path_auditor(object):
495 class path_auditor(object):
498 '''ensure that a filesystem path contains no banned components.
496 '''ensure that a filesystem path contains no banned components.
499 the following properties of a path are checked:
497 the following properties of a path are checked:
500
498
501 - under top-level .hg
499 - under top-level .hg
502 - starts at the root of a windows drive
500 - starts at the root of a windows drive
503 - contains ".."
501 - contains ".."
504 - traverses a symlink (e.g. a/symlink_here/b)
502 - traverses a symlink (e.g. a/symlink_here/b)
505 - inside a nested repository'''
503 - inside a nested repository'''
506
504
507 def __init__(self, root):
505 def __init__(self, root):
508 self.audited = set()
506 self.audited = set()
509 self.auditeddir = set()
507 self.auditeddir = set()
510 self.root = root
508 self.root = root
511
509
512 def __call__(self, path):
510 def __call__(self, path):
513 if path in self.audited:
511 if path in self.audited:
514 return
512 return
515 normpath = os.path.normcase(path)
513 normpath = os.path.normcase(path)
516 parts = splitpath(normpath)
514 parts = splitpath(normpath)
517 if (os.path.splitdrive(path)[0]
515 if (os.path.splitdrive(path)[0]
518 or parts[0].lower() in ('.hg', '.hg.', '')
516 or parts[0].lower() in ('.hg', '.hg.', '')
519 or os.pardir in parts):
517 or os.pardir in parts):
520 raise Abort(_("path contains illegal component: %s") % path)
518 raise Abort(_("path contains illegal component: %s") % path)
521 if '.hg' in path.lower():
519 if '.hg' in path.lower():
522 lparts = [p.lower() for p in parts]
520 lparts = [p.lower() for p in parts]
523 for p in '.hg', '.hg.':
521 for p in '.hg', '.hg.':
524 if p in lparts[1:]:
522 if p in lparts[1:]:
525 pos = lparts.index(p)
523 pos = lparts.index(p)
526 base = os.path.join(*parts[:pos])
524 base = os.path.join(*parts[:pos])
527 raise Abort(_('path %r is inside repo %r') % (path, base))
525 raise Abort(_('path %r is inside repo %r') % (path, base))
528 def check(prefix):
526 def check(prefix):
529 curpath = os.path.join(self.root, prefix)
527 curpath = os.path.join(self.root, prefix)
530 try:
528 try:
531 st = os.lstat(curpath)
529 st = os.lstat(curpath)
532 except OSError, err:
530 except OSError, err:
533 # EINVAL can be raised as invalid path syntax under win32.
531 # EINVAL can be raised as invalid path syntax under win32.
534 # They must be ignored for patterns can be checked too.
532 # They must be ignored for patterns can be checked too.
535 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
533 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
536 raise
534 raise
537 else:
535 else:
538 if stat.S_ISLNK(st.st_mode):
536 if stat.S_ISLNK(st.st_mode):
539 raise Abort(_('path %r traverses symbolic link %r') %
537 raise Abort(_('path %r traverses symbolic link %r') %
540 (path, prefix))
538 (path, prefix))
541 elif (stat.S_ISDIR(st.st_mode) and
539 elif (stat.S_ISDIR(st.st_mode) and
542 os.path.isdir(os.path.join(curpath, '.hg'))):
540 os.path.isdir(os.path.join(curpath, '.hg'))):
543 raise Abort(_('path %r is inside repo %r') %
541 raise Abort(_('path %r is inside repo %r') %
544 (path, prefix))
542 (path, prefix))
545 parts.pop()
543 parts.pop()
546 prefixes = []
544 prefixes = []
547 while parts:
545 while parts:
548 prefix = os.sep.join(parts)
546 prefix = os.sep.join(parts)
549 if prefix in self.auditeddir:
547 if prefix in self.auditeddir:
550 break
548 break
551 check(prefix)
549 check(prefix)
552 prefixes.append(prefix)
550 prefixes.append(prefix)
553 parts.pop()
551 parts.pop()
554
552
555 self.audited.add(path)
553 self.audited.add(path)
556 # only add prefixes to the cache after checking everything: we don't
554 # only add prefixes to the cache after checking everything: we don't
557 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
555 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
558 self.auditeddir.update(prefixes)
556 self.auditeddir.update(prefixes)
559
557
560 def nlinks(pathname):
558 def nlinks(pathname):
561 """Return number of hardlinks for the given file."""
559 """Return number of hardlinks for the given file."""
562 return os.lstat(pathname).st_nlink
560 return os.lstat(pathname).st_nlink
563
561
564 if hasattr(os, 'link'):
562 if hasattr(os, 'link'):
565 os_link = os.link
563 os_link = os.link
566 else:
564 else:
567 def os_link(src, dst):
565 def os_link(src, dst):
568 raise OSError(0, _("Hardlinks not supported"))
566 raise OSError(0, _("Hardlinks not supported"))
569
567
570 def lookup_reg(key, name=None, scope=None):
568 def lookup_reg(key, name=None, scope=None):
571 return None
569 return None
572
570
573 if os.name == 'nt':
571 if os.name == 'nt':
574 from windows import *
572 from windows import *
575 else:
573 else:
576 from posix import *
574 from posix import *
577
575
578 def makelock(info, pathname):
576 def makelock(info, pathname):
579 try:
577 try:
580 return os.symlink(info, pathname)
578 return os.symlink(info, pathname)
581 except OSError, why:
579 except OSError, why:
582 if why.errno == errno.EEXIST:
580 if why.errno == errno.EEXIST:
583 raise
581 raise
584 except AttributeError: # no symlink in os
582 except AttributeError: # no symlink in os
585 pass
583 pass
586
584
587 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
585 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
588 os.write(ld, info)
586 os.write(ld, info)
589 os.close(ld)
587 os.close(ld)
590
588
591 def readlock(pathname):
589 def readlock(pathname):
592 try:
590 try:
593 return os.readlink(pathname)
591 return os.readlink(pathname)
594 except OSError, why:
592 except OSError, why:
595 if why.errno not in (errno.EINVAL, errno.ENOSYS):
593 if why.errno not in (errno.EINVAL, errno.ENOSYS):
596 raise
594 raise
597 except AttributeError: # no symlink in os
595 except AttributeError: # no symlink in os
598 pass
596 pass
599 return posixfile(pathname).read()
597 return posixfile(pathname).read()
600
598
601 def fstat(fp):
599 def fstat(fp):
602 '''stat file object that may not have fileno method.'''
600 '''stat file object that may not have fileno method.'''
603 try:
601 try:
604 return os.fstat(fp.fileno())
602 return os.fstat(fp.fileno())
605 except AttributeError:
603 except AttributeError:
606 return os.stat(fp.name)
604 return os.stat(fp.name)
607
605
608 # File system features
606 # File system features
609
607
610 def checkcase(path):
608 def checkcase(path):
611 """
609 """
612 Check whether the given path is on a case-sensitive filesystem
610 Check whether the given path is on a case-sensitive filesystem
613
611
614 Requires a path (like /foo/.hg) ending with a foldable final
612 Requires a path (like /foo/.hg) ending with a foldable final
615 directory component.
613 directory component.
616 """
614 """
617 s1 = os.stat(path)
615 s1 = os.stat(path)
618 d, b = os.path.split(path)
616 d, b = os.path.split(path)
619 p2 = os.path.join(d, b.upper())
617 p2 = os.path.join(d, b.upper())
620 if path == p2:
618 if path == p2:
621 p2 = os.path.join(d, b.lower())
619 p2 = os.path.join(d, b.lower())
622 try:
620 try:
623 s2 = os.stat(p2)
621 s2 = os.stat(p2)
624 if s2 == s1:
622 if s2 == s1:
625 return False
623 return False
626 return True
624 return True
627 except:
625 except:
628 return True
626 return True
629
627
630 _fspathcache = {}
628 _fspathcache = {}
631 def fspath(name, root):
629 def fspath(name, root):
632 '''Get name in the case stored in the filesystem
630 '''Get name in the case stored in the filesystem
633
631
634 The name is either relative to root, or it is an absolute path starting
632 The name is either relative to root, or it is an absolute path starting
635 with root. Note that this function is unnecessary, and should not be
633 with root. Note that this function is unnecessary, and should not be
636 called, for case-sensitive filesystems (simply because it's expensive).
634 called, for case-sensitive filesystems (simply because it's expensive).
637 '''
635 '''
638 # If name is absolute, make it relative
636 # If name is absolute, make it relative
639 if name.lower().startswith(root.lower()):
637 if name.lower().startswith(root.lower()):
640 l = len(root)
638 l = len(root)
641 if name[l] == os.sep or name[l] == os.altsep:
639 if name[l] == os.sep or name[l] == os.altsep:
642 l = l + 1
640 l = l + 1
643 name = name[l:]
641 name = name[l:]
644
642
645 if not os.path.exists(os.path.join(root, name)):
643 if not os.path.exists(os.path.join(root, name)):
646 return None
644 return None
647
645
648 seps = os.sep
646 seps = os.sep
649 if os.altsep:
647 if os.altsep:
650 seps = seps + os.altsep
648 seps = seps + os.altsep
651 # Protect backslashes. This gets silly very quickly.
649 # Protect backslashes. This gets silly very quickly.
652 seps.replace('\\','\\\\')
650 seps.replace('\\','\\\\')
653 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
651 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
654 dir = os.path.normcase(os.path.normpath(root))
652 dir = os.path.normcase(os.path.normpath(root))
655 result = []
653 result = []
656 for part, sep in pattern.findall(name):
654 for part, sep in pattern.findall(name):
657 if sep:
655 if sep:
658 result.append(sep)
656 result.append(sep)
659 continue
657 continue
660
658
661 if dir not in _fspathcache:
659 if dir not in _fspathcache:
662 _fspathcache[dir] = os.listdir(dir)
660 _fspathcache[dir] = os.listdir(dir)
663 contents = _fspathcache[dir]
661 contents = _fspathcache[dir]
664
662
665 lpart = part.lower()
663 lpart = part.lower()
666 for n in contents:
664 for n in contents:
667 if n.lower() == lpart:
665 if n.lower() == lpart:
668 result.append(n)
666 result.append(n)
669 break
667 break
670 else:
668 else:
671 # Cannot happen, as the file exists!
669 # Cannot happen, as the file exists!
672 result.append(part)
670 result.append(part)
673 dir = os.path.join(dir, lpart)
671 dir = os.path.join(dir, lpart)
674
672
675 return ''.join(result)
673 return ''.join(result)
676
674
677 def checkexec(path):
675 def checkexec(path):
678 """
676 """
679 Check whether the given path is on a filesystem with UNIX-like exec flags
677 Check whether the given path is on a filesystem with UNIX-like exec flags
680
678
681 Requires a directory (like /foo/.hg)
679 Requires a directory (like /foo/.hg)
682 """
680 """
683
681
684 # VFAT on some Linux versions can flip mode but it doesn't persist
682 # VFAT on some Linux versions can flip mode but it doesn't persist
685 # a FS remount. Frequently we can detect it if files are created
683 # a FS remount. Frequently we can detect it if files are created
686 # with exec bit on.
684 # with exec bit on.
687
685
688 try:
686 try:
689 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
687 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
690 fh, fn = tempfile.mkstemp("", "", path)
688 fh, fn = tempfile.mkstemp("", "", path)
691 try:
689 try:
692 os.close(fh)
690 os.close(fh)
693 m = os.stat(fn).st_mode & 0777
691 m = os.stat(fn).st_mode & 0777
694 new_file_has_exec = m & EXECFLAGS
692 new_file_has_exec = m & EXECFLAGS
695 os.chmod(fn, m ^ EXECFLAGS)
693 os.chmod(fn, m ^ EXECFLAGS)
696 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
694 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
697 finally:
695 finally:
698 os.unlink(fn)
696 os.unlink(fn)
699 except (IOError, OSError):
697 except (IOError, OSError):
700 # we don't care, the user probably won't be able to commit anyway
698 # we don't care, the user probably won't be able to commit anyway
701 return False
699 return False
702 return not (new_file_has_exec or exec_flags_cannot_flip)
700 return not (new_file_has_exec or exec_flags_cannot_flip)
703
701
704 def checklink(path):
702 def checklink(path):
705 """check whether the given path is on a symlink-capable filesystem"""
703 """check whether the given path is on a symlink-capable filesystem"""
706 # mktemp is not racy because symlink creation will fail if the
704 # mktemp is not racy because symlink creation will fail if the
707 # file already exists
705 # file already exists
708 name = tempfile.mktemp(dir=path)
706 name = tempfile.mktemp(dir=path)
709 try:
707 try:
710 os.symlink(".", name)
708 os.symlink(".", name)
711 os.unlink(name)
709 os.unlink(name)
712 return True
710 return True
713 except (OSError, AttributeError):
711 except (OSError, AttributeError):
714 return False
712 return False
715
713
716 def needbinarypatch():
714 def needbinarypatch():
717 """return True if patches should be applied in binary mode by default."""
715 """return True if patches should be applied in binary mode by default."""
718 return os.name == 'nt'
716 return os.name == 'nt'
719
717
720 def endswithsep(path):
718 def endswithsep(path):
721 '''Check path ends with os.sep or os.altsep.'''
719 '''Check path ends with os.sep or os.altsep.'''
722 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
720 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
723
721
724 def splitpath(path):
722 def splitpath(path):
725 '''Split path by os.sep.
723 '''Split path by os.sep.
726 Note that this function does not use os.altsep because this is
724 Note that this function does not use os.altsep because this is
727 an alternative of simple "xxx.split(os.sep)".
725 an alternative of simple "xxx.split(os.sep)".
728 It is recommended to use os.path.normpath() before using this
726 It is recommended to use os.path.normpath() before using this
729 function if need.'''
727 function if need.'''
730 return path.split(os.sep)
728 return path.split(os.sep)
731
729
732 def gui():
730 def gui():
733 '''Are we running in a GUI?'''
731 '''Are we running in a GUI?'''
734 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
732 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
735
733
736 def mktempcopy(name, emptyok=False, createmode=None):
734 def mktempcopy(name, emptyok=False, createmode=None):
737 """Create a temporary file with the same contents from name
735 """Create a temporary file with the same contents from name
738
736
739 The permission bits are copied from the original file.
737 The permission bits are copied from the original file.
740
738
741 If the temporary file is going to be truncated immediately, you
739 If the temporary file is going to be truncated immediately, you
742 can use emptyok=True as an optimization.
740 can use emptyok=True as an optimization.
743
741
744 Returns the name of the temporary file.
742 Returns the name of the temporary file.
745 """
743 """
746 d, fn = os.path.split(name)
744 d, fn = os.path.split(name)
747 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
745 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
748 os.close(fd)
746 os.close(fd)
749 # Temporary files are created with mode 0600, which is usually not
747 # Temporary files are created with mode 0600, which is usually not
750 # what we want. If the original file already exists, just copy
748 # what we want. If the original file already exists, just copy
751 # its mode. Otherwise, manually obey umask.
749 # its mode. Otherwise, manually obey umask.
752 try:
750 try:
753 st_mode = os.lstat(name).st_mode & 0777
751 st_mode = os.lstat(name).st_mode & 0777
754 except OSError, inst:
752 except OSError, inst:
755 if inst.errno != errno.ENOENT:
753 if inst.errno != errno.ENOENT:
756 raise
754 raise
757 st_mode = createmode
755 st_mode = createmode
758 if st_mode is None:
756 if st_mode is None:
759 st_mode = ~umask
757 st_mode = ~umask
760 st_mode &= 0666
758 st_mode &= 0666
761 os.chmod(temp, st_mode)
759 os.chmod(temp, st_mode)
762 if emptyok:
760 if emptyok:
763 return temp
761 return temp
764 try:
762 try:
765 try:
763 try:
766 ifp = posixfile(name, "rb")
764 ifp = posixfile(name, "rb")
767 except IOError, inst:
765 except IOError, inst:
768 if inst.errno == errno.ENOENT:
766 if inst.errno == errno.ENOENT:
769 return temp
767 return temp
770 if not getattr(inst, 'filename', None):
768 if not getattr(inst, 'filename', None):
771 inst.filename = name
769 inst.filename = name
772 raise
770 raise
773 ofp = posixfile(temp, "wb")
771 ofp = posixfile(temp, "wb")
774 for chunk in filechunkiter(ifp):
772 for chunk in filechunkiter(ifp):
775 ofp.write(chunk)
773 ofp.write(chunk)
776 ifp.close()
774 ifp.close()
777 ofp.close()
775 ofp.close()
778 except:
776 except:
779 try: os.unlink(temp)
777 try: os.unlink(temp)
780 except: pass
778 except: pass
781 raise
779 raise
782 return temp
780 return temp
783
781
784 class atomictempfile(object):
782 class atomictempfile(object):
785 """file-like object that atomically updates a file
783 """file-like object that atomically updates a file
786
784
787 All writes will be redirected to a temporary copy of the original
785 All writes will be redirected to a temporary copy of the original
788 file. When rename is called, the copy is renamed to the original
786 file. When rename is called, the copy is renamed to the original
789 name, making the changes visible.
787 name, making the changes visible.
790 """
788 """
791 def __init__(self, name, mode, createmode):
789 def __init__(self, name, mode, createmode):
792 self.__name = name
790 self.__name = name
793 self._fp = None
791 self._fp = None
794 self.temp = mktempcopy(name, emptyok=('w' in mode),
792 self.temp = mktempcopy(name, emptyok=('w' in mode),
795 createmode=createmode)
793 createmode=createmode)
796 self._fp = posixfile(self.temp, mode)
794 self._fp = posixfile(self.temp, mode)
797
795
798 def __getattr__(self, name):
796 def __getattr__(self, name):
799 return getattr(self._fp, name)
797 return getattr(self._fp, name)
800
798
801 def rename(self):
799 def rename(self):
802 if not self._fp.closed:
800 if not self._fp.closed:
803 self._fp.close()
801 self._fp.close()
804 rename(self.temp, localpath(self.__name))
802 rename(self.temp, localpath(self.__name))
805
803
806 def __del__(self):
804 def __del__(self):
807 if not self._fp:
805 if not self._fp:
808 return
806 return
809 if not self._fp.closed:
807 if not self._fp.closed:
810 try:
808 try:
811 os.unlink(self.temp)
809 os.unlink(self.temp)
812 except: pass
810 except: pass
813 self._fp.close()
811 self._fp.close()
814
812
815 def makedirs(name, mode=None):
813 def makedirs(name, mode=None):
816 """recursive directory creation with parent mode inheritance"""
814 """recursive directory creation with parent mode inheritance"""
817 try:
815 try:
818 os.mkdir(name)
816 os.mkdir(name)
819 if mode is not None:
817 if mode is not None:
820 os.chmod(name, mode)
818 os.chmod(name, mode)
821 return
819 return
822 except OSError, err:
820 except OSError, err:
823 if err.errno == errno.EEXIST:
821 if err.errno == errno.EEXIST:
824 return
822 return
825 if err.errno != errno.ENOENT:
823 if err.errno != errno.ENOENT:
826 raise
824 raise
827 parent = os.path.abspath(os.path.dirname(name))
825 parent = os.path.abspath(os.path.dirname(name))
828 makedirs(parent, mode)
826 makedirs(parent, mode)
829 makedirs(name, mode)
827 makedirs(name, mode)
830
828
831 class opener(object):
829 class opener(object):
832 """Open files relative to a base directory
830 """Open files relative to a base directory
833
831
834 This class is used to hide the details of COW semantics and
832 This class is used to hide the details of COW semantics and
835 remote file access from higher level code.
833 remote file access from higher level code.
836 """
834 """
837 def __init__(self, base, audit=True):
835 def __init__(self, base, audit=True):
838 self.base = base
836 self.base = base
839 if audit:
837 if audit:
840 self.audit_path = path_auditor(base)
838 self.audit_path = path_auditor(base)
841 else:
839 else:
842 self.audit_path = always
840 self.audit_path = always
843 self.createmode = None
841 self.createmode = None
844
842
845 @propertycache
843 @propertycache
846 def _can_symlink(self):
844 def _can_symlink(self):
847 return checklink(self.base)
845 return checklink(self.base)
848
846
849 def _fixfilemode(self, name):
847 def _fixfilemode(self, name):
850 if self.createmode is None:
848 if self.createmode is None:
851 return
849 return
852 os.chmod(name, self.createmode & 0666)
850 os.chmod(name, self.createmode & 0666)
853
851
854 def __call__(self, path, mode="r", text=False, atomictemp=False):
852 def __call__(self, path, mode="r", text=False, atomictemp=False):
855 self.audit_path(path)
853 self.audit_path(path)
856 f = os.path.join(self.base, path)
854 f = os.path.join(self.base, path)
857
855
858 if not text and "b" not in mode:
856 if not text and "b" not in mode:
859 mode += "b" # for that other OS
857 mode += "b" # for that other OS
860
858
861 nlink = -1
859 nlink = -1
862 if mode not in ("r", "rb"):
860 if mode not in ("r", "rb"):
863 try:
861 try:
864 nlink = nlinks(f)
862 nlink = nlinks(f)
865 except OSError:
863 except OSError:
866 nlink = 0
864 nlink = 0
867 d = os.path.dirname(f)
865 d = os.path.dirname(f)
868 if not os.path.isdir(d):
866 if not os.path.isdir(d):
869 makedirs(d, self.createmode)
867 makedirs(d, self.createmode)
870 if atomictemp:
868 if atomictemp:
871 return atomictempfile(f, mode, self.createmode)
869 return atomictempfile(f, mode, self.createmode)
872 if nlink > 1:
870 if nlink > 1:
873 rename(mktempcopy(f), f)
871 rename(mktempcopy(f), f)
874 fp = posixfile(f, mode)
872 fp = posixfile(f, mode)
875 if nlink == 0:
873 if nlink == 0:
876 self._fixfilemode(f)
874 self._fixfilemode(f)
877 return fp
875 return fp
878
876
879 def symlink(self, src, dst):
877 def symlink(self, src, dst):
880 self.audit_path(dst)
878 self.audit_path(dst)
881 linkname = os.path.join(self.base, dst)
879 linkname = os.path.join(self.base, dst)
882 try:
880 try:
883 os.unlink(linkname)
881 os.unlink(linkname)
884 except OSError:
882 except OSError:
885 pass
883 pass
886
884
887 dirname = os.path.dirname(linkname)
885 dirname = os.path.dirname(linkname)
888 if not os.path.exists(dirname):
886 if not os.path.exists(dirname):
889 makedirs(dirname, self.createmode)
887 makedirs(dirname, self.createmode)
890
888
891 if self._can_symlink:
889 if self._can_symlink:
892 try:
890 try:
893 os.symlink(src, linkname)
891 os.symlink(src, linkname)
894 except OSError, err:
892 except OSError, err:
895 raise OSError(err.errno, _('could not symlink to %r: %s') %
893 raise OSError(err.errno, _('could not symlink to %r: %s') %
896 (src, err.strerror), linkname)
894 (src, err.strerror), linkname)
897 else:
895 else:
898 f = self(dst, "w")
896 f = self(dst, "w")
899 f.write(src)
897 f.write(src)
900 f.close()
898 f.close()
901 self._fixfilemode(dst)
899 self._fixfilemode(dst)
902
900
903 class chunkbuffer(object):
901 class chunkbuffer(object):
904 """Allow arbitrary sized chunks of data to be efficiently read from an
902 """Allow arbitrary sized chunks of data to be efficiently read from an
905 iterator over chunks of arbitrary size."""
903 iterator over chunks of arbitrary size."""
906
904
907 def __init__(self, in_iter):
905 def __init__(self, in_iter):
908 """in_iter is the iterator that's iterating over the input chunks.
906 """in_iter is the iterator that's iterating over the input chunks.
909 targetsize is how big a buffer to try to maintain."""
907 targetsize is how big a buffer to try to maintain."""
910 self.iter = iter(in_iter)
908 self.iter = iter(in_iter)
911 self.buf = ''
909 self.buf = ''
912 self.targetsize = 2**16
910 self.targetsize = 2**16
913
911
914 def read(self, l):
912 def read(self, l):
915 """Read L bytes of data from the iterator of chunks of data.
913 """Read L bytes of data from the iterator of chunks of data.
916 Returns less than L bytes if the iterator runs dry."""
914 Returns less than L bytes if the iterator runs dry."""
917 if l > len(self.buf) and self.iter:
915 if l > len(self.buf) and self.iter:
918 # Clamp to a multiple of self.targetsize
916 # Clamp to a multiple of self.targetsize
919 targetsize = max(l, self.targetsize)
917 targetsize = max(l, self.targetsize)
920 collector = cStringIO.StringIO()
918 collector = cStringIO.StringIO()
921 collector.write(self.buf)
919 collector.write(self.buf)
922 collected = len(self.buf)
920 collected = len(self.buf)
923 for chunk in self.iter:
921 for chunk in self.iter:
924 collector.write(chunk)
922 collector.write(chunk)
925 collected += len(chunk)
923 collected += len(chunk)
926 if collected >= targetsize:
924 if collected >= targetsize:
927 break
925 break
928 if collected < targetsize:
926 if collected < targetsize:
929 self.iter = False
927 self.iter = False
930 self.buf = collector.getvalue()
928 self.buf = collector.getvalue()
931 if len(self.buf) == l:
929 if len(self.buf) == l:
932 s, self.buf = str(self.buf), ''
930 s, self.buf = str(self.buf), ''
933 else:
931 else:
934 s, self.buf = self.buf[:l], buffer(self.buf, l)
932 s, self.buf = self.buf[:l], buffer(self.buf, l)
935 return s
933 return s
936
934
937 def filechunkiter(f, size=65536, limit=None):
935 def filechunkiter(f, size=65536, limit=None):
938 """Create a generator that produces the data in the file size
936 """Create a generator that produces the data in the file size
939 (default 65536) bytes at a time, up to optional limit (default is
937 (default 65536) bytes at a time, up to optional limit (default is
940 to read all data). Chunks may be less than size bytes if the
938 to read all data). Chunks may be less than size bytes if the
941 chunk is the last chunk in the file, or the file is a socket or
939 chunk is the last chunk in the file, or the file is a socket or
942 some other type of file that sometimes reads less data than is
940 some other type of file that sometimes reads less data than is
943 requested."""
941 requested."""
944 assert size >= 0
942 assert size >= 0
945 assert limit is None or limit >= 0
943 assert limit is None or limit >= 0
946 while True:
944 while True:
947 if limit is None: nbytes = size
945 if limit is None: nbytes = size
948 else: nbytes = min(limit, size)
946 else: nbytes = min(limit, size)
949 s = nbytes and f.read(nbytes)
947 s = nbytes and f.read(nbytes)
950 if not s: break
948 if not s: break
951 if limit: limit -= len(s)
949 if limit: limit -= len(s)
952 yield s
950 yield s
953
951
954 def makedate():
952 def makedate():
955 lt = time.localtime()
953 lt = time.localtime()
956 if lt[8] == 1 and time.daylight:
954 if lt[8] == 1 and time.daylight:
957 tz = time.altzone
955 tz = time.altzone
958 else:
956 else:
959 tz = time.timezone
957 tz = time.timezone
960 return time.mktime(lt), tz
958 return time.mktime(lt), tz
961
959
962 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
960 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
963 """represent a (unixtime, offset) tuple as a localized time.
961 """represent a (unixtime, offset) tuple as a localized time.
964 unixtime is seconds since the epoch, and offset is the time zone's
962 unixtime is seconds since the epoch, and offset is the time zone's
965 number of seconds away from UTC. if timezone is false, do not
963 number of seconds away from UTC. if timezone is false, do not
966 append time zone to string."""
964 append time zone to string."""
967 t, tz = date or makedate()
965 t, tz = date or makedate()
968 if "%1" in format or "%2" in format:
966 if "%1" in format or "%2" in format:
969 sign = (tz > 0) and "-" or "+"
967 sign = (tz > 0) and "-" or "+"
970 minutes = abs(tz) // 60
968 minutes = abs(tz) // 60
971 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
969 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
972 format = format.replace("%2", "%02d" % (minutes % 60))
970 format = format.replace("%2", "%02d" % (minutes % 60))
973 s = time.strftime(format, time.gmtime(float(t) - tz))
971 s = time.strftime(format, time.gmtime(float(t) - tz))
974 return s
972 return s
975
973
976 def shortdate(date=None):
974 def shortdate(date=None):
977 """turn (timestamp, tzoff) tuple into iso 8631 date."""
975 """turn (timestamp, tzoff) tuple into iso 8631 date."""
978 return datestr(date, format='%Y-%m-%d')
976 return datestr(date, format='%Y-%m-%d')
979
977
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

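# Illustrative sketch (editor's addition, not part of util.py): the kinds of
# input parsedate() accepts. The helper name _parsedate_examples is
# hypothetical.
def _parsedate_examples():
    # a raw "unixtime offset" string is split and range-checked
    assert parsedate('1000000000 0') == (1000000000, 0)
    # an existing (unixtime, offset) tuple passes straight through
    assert parsedate((1000000000, 0)) == (1000000000, 0)
    # otherwise the string is tried against each format; with no explicit
    # zone the local offset is used, which must lie within [-50400, 43200]
    when, offset = parsedate('2006-12-06 13:18:29', ['%Y-%m-%d %H:%M:%S'])
    assert -50400 <= offset <= 43200
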
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within a given number of days of today

    '{date} to {date}' a date range, inclusive

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                # day is out of range for that month; try a shorter month
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

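# Illustrative sketch (editor's addition, not part of util.py): matchdate()
# turns a specifier into a predicate over unix timestamps. Assumes '%Y-%m-%d'
# is among the date formats defined earlier in this module; the helper name
# _matchdate_examples is hypothetical.
def _matchdate_examples():
    after_y2k = matchdate('>2000-01-01')
    assert not after_y2k(0)              # the epoch is well before 2000
    assert after_y2k(makedate()[0])      # "now" is after 2000
    recent = matchdate('-30')            # within the last 30 days
    assert recent(makedate()[0])
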
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

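# Illustrative sketch (editor's addition, not part of util.py): how email() and
# shortuser() reduce a typical "Name <address>" author string. The helper name
# _author_examples is hypothetical.
def _author_examples():
    author = 'J. Random Hacker <jrandom.hacker@example.com>'
    assert email(author) == 'jrandom.hacker@example.com'
    assert shortuser(author) == 'jrandom'
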
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

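# Illustrative sketch (editor's addition, not executed): walkrepos() yields the
# root of every repository below a starting directory, plus any patch-queue
# repository living in <repo>/.hg/patches. The path below is hypothetical.
#
#   for root in walkrepos('/srv/hg', followsym=True, recurse=True):
#       print root
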
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

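# Illustrative sketch (editor's addition, not executed): how rcpath() expands
# HGRCPATH on a POSIX system (os.pathsep == ':'). The paths are hypothetical.
#
#   HGRCPATH='/etc/mercurial:/home/user/.hgrc'
#     -> every file ending in '.rc' under the directory /etc/mercurial,
#        followed by /home/user/.hgrc itself; an empty HGRCPATH means only
#        the repository's own .hg/hgrc is consulted.
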
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

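# Illustrative sketch (editor's addition, not part of util.py): the tiered
# output of bytecount(), assuming no message translation is active. The helper
# name _bytecount_examples is hypothetical.
def _bytecount_examples():
    assert bytecount(0) == '0 bytes'
    assert bytecount(1024) == '1.00 KB'            # three significant digits
    assert bytecount(15 * (1 << 20)) == '15.0 MB'  # at every magnitude
    assert bytecount(2 * (1 << 30)) == '2.00 GB'
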
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                try:
                    fd = dev.fileno()
                except AttributeError:
                    continue
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def wrap(line, hangindent, width=None):
    if width is None:
        width = termwidth() - 2
    padding = '\n' + ' ' * hangindent
    return padding.join(textwrap.wrap(line, width=width - hangindent))

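# Illustrative sketch (editor's addition, not part of util.py): wrap() breaks a
# line to the given width and indents continuation lines by hangindent spaces.
# The helper name _wrap_example is hypothetical.
def _wrap_example():
    assert wrap('one two three four', 2, width=12) == 'one two\n  three four'
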
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line