util: use 'auditor' as consistent name for path auditors
Martin Geisler -
r12078:e03ca36b default
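The changeset renames the audit_path keyword of canonpath and the audit_path attribute of opener to the consistent name auditor: callers either pass in their own auditing callable, or let a path_auditor be created on demand. The following is a minimal standalone sketch of that calling convention; the helper names (default_auditor, canonicalize) are hypothetical stand-ins for illustration, not Mercurial's own implementation.

import os

def default_auditor(root):
    # Toy stand-in for util.path_auditor(root): reject paths that try to
    # escape the root via ".." components (illustration only).
    def audit(path):
        if os.pardir in path.split('/'):
            raise ValueError('path contains illegal component: %s' % path)
    return audit

def canonicalize(root, name, auditor=None):
    # Mirrors the renamed keyword in util.canonpath(root, cwd, myname, auditor=None):
    # a default auditor is built only when the caller does not supply one.
    if auditor is None:
        auditor = default_auditor(root)
    auditor(name)
    return name.replace(os.sep, '/')

print(canonicalize('/repo', 'a/b'))        # -> a/b
# canonicalize('/repo', '../etc/passwd')   # raises ValueError

As the diff below shows, opener keeps the same shape: it stores path_auditor(base) as self.auditor when auditing is enabled, and the accept-everything helper always when it is not.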
@@ -1,1433 +1,1433 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 return open(outname, 'rb').read()
201 return open(outname, 'rb').read()
202 finally:
202 finally:
203 try:
203 try:
204 if inname:
204 if inname:
205 os.unlink(inname)
205 os.unlink(inname)
206 except:
206 except:
207 pass
207 pass
208 try:
208 try:
209 if outname:
209 if outname:
210 os.unlink(outname)
210 os.unlink(outname)
211 except:
211 except:
212 pass
212 pass
213
213
214 filtertable = {
214 filtertable = {
215 'tempfile:': tempfilter,
215 'tempfile:': tempfilter,
216 'pipe:': pipefilter,
216 'pipe:': pipefilter,
217 }
217 }
218
218
219 def filter(s, cmd):
219 def filter(s, cmd):
220 "filter a string through a command that transforms its input to its output"
220 "filter a string through a command that transforms its input to its output"
221 for name, fn in filtertable.iteritems():
221 for name, fn in filtertable.iteritems():
222 if cmd.startswith(name):
222 if cmd.startswith(name):
223 return fn(s, cmd[len(name):].lstrip())
223 return fn(s, cmd[len(name):].lstrip())
224 return pipefilter(s, cmd)
224 return pipefilter(s, cmd)
225
225
226 def binary(s):
226 def binary(s):
227 """return true if a string is binary data"""
227 """return true if a string is binary data"""
228 return bool(s and '\0' in s)
228 return bool(s and '\0' in s)
229
229
230 def increasingchunks(source, min=1024, max=65536):
230 def increasingchunks(source, min=1024, max=65536):
231 '''return no less than min bytes per chunk while data remains,
231 '''return no less than min bytes per chunk while data remains,
232 doubling min after each chunk until it reaches max'''
232 doubling min after each chunk until it reaches max'''
233 def log2(x):
233 def log2(x):
234 if not x:
234 if not x:
235 return 0
235 return 0
236 i = 0
236 i = 0
237 while x:
237 while x:
238 x >>= 1
238 x >>= 1
239 i += 1
239 i += 1
240 return i - 1
240 return i - 1
241
241
242 buf = []
242 buf = []
243 blen = 0
243 blen = 0
244 for chunk in source:
244 for chunk in source:
245 buf.append(chunk)
245 buf.append(chunk)
246 blen += len(chunk)
246 blen += len(chunk)
247 if blen >= min:
247 if blen >= min:
248 if min < max:
248 if min < max:
249 min = min << 1
249 min = min << 1
250 nmin = 1 << log2(blen)
250 nmin = 1 << log2(blen)
251 if nmin > min:
251 if nmin > min:
252 min = nmin
252 min = nmin
253 if min > max:
253 if min > max:
254 min = max
254 min = max
255 yield ''.join(buf)
255 yield ''.join(buf)
256 blen = 0
256 blen = 0
257 buf = []
257 buf = []
258 if buf:
258 if buf:
259 yield ''.join(buf)
259 yield ''.join(buf)
260
260
261 Abort = error.Abort
261 Abort = error.Abort
262
262
263 def always(fn):
263 def always(fn):
264 return True
264 return True
265
265
266 def never(fn):
266 def never(fn):
267 return False
267 return False
268
268
269 def pathto(root, n1, n2):
269 def pathto(root, n1, n2):
270 '''return the relative path from one place to another.
270 '''return the relative path from one place to another.
271 root should use os.sep to separate directories
271 root should use os.sep to separate directories
272 n1 should use os.sep to separate directories
272 n1 should use os.sep to separate directories
273 n2 should use "/" to separate directories
273 n2 should use "/" to separate directories
274 returns an os.sep-separated path.
274 returns an os.sep-separated path.
275
275
276 If n1 is a relative path, it's assumed it's
276 If n1 is a relative path, it's assumed it's
277 relative to root.
277 relative to root.
278 n2 should always be relative to root.
278 n2 should always be relative to root.
279 '''
279 '''
280 if not n1:
280 if not n1:
281 return localpath(n2)
281 return localpath(n2)
282 if os.path.isabs(n1):
282 if os.path.isabs(n1):
283 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
283 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
284 return os.path.join(root, localpath(n2))
284 return os.path.join(root, localpath(n2))
285 n2 = '/'.join((pconvert(root), n2))
285 n2 = '/'.join((pconvert(root), n2))
286 a, b = splitpath(n1), n2.split('/')
286 a, b = splitpath(n1), n2.split('/')
287 a.reverse()
287 a.reverse()
288 b.reverse()
288 b.reverse()
289 while a and b and a[-1] == b[-1]:
289 while a and b and a[-1] == b[-1]:
290 a.pop()
290 a.pop()
291 b.pop()
291 b.pop()
292 b.reverse()
292 b.reverse()
293 return os.sep.join((['..'] * len(a)) + b) or '.'
293 return os.sep.join((['..'] * len(a)) + b) or '.'
294
294
295 def canonpath(root, cwd, myname, audit_path=None):
295 def canonpath(root, cwd, myname, auditor=None):
296 """return the canonical path of myname, given cwd and root"""
296 """return the canonical path of myname, given cwd and root"""
297 if endswithsep(root):
297 if endswithsep(root):
298 rootsep = root
298 rootsep = root
299 else:
299 else:
300 rootsep = root + os.sep
300 rootsep = root + os.sep
301 name = myname
301 name = myname
302 if not os.path.isabs(name):
302 if not os.path.isabs(name):
303 name = os.path.join(root, cwd, name)
303 name = os.path.join(root, cwd, name)
304 name = os.path.normpath(name)
304 name = os.path.normpath(name)
305 if audit_path is None:
305 if auditor is None:
306 audit_path = path_auditor(root)
306 auditor = path_auditor(root)
307 if name != rootsep and name.startswith(rootsep):
307 if name != rootsep and name.startswith(rootsep):
308 name = name[len(rootsep):]
308 name = name[len(rootsep):]
309 audit_path(name)
309 auditor(name)
310 return pconvert(name)
310 return pconvert(name)
311 elif name == root:
311 elif name == root:
312 return ''
312 return ''
313 else:
313 else:
314 # Determine whether `name' is in the hierarchy at or beneath `root',
314 # Determine whether `name' is in the hierarchy at or beneath `root',
315 # by iterating name=dirname(name) until that causes no change (can't
315 # by iterating name=dirname(name) until that causes no change (can't
316 # check name == '/', because that doesn't work on windows). For each
316 # check name == '/', because that doesn't work on windows). For each
317 # `name', compare dev/inode numbers. If they match, the list `rel'
317 # `name', compare dev/inode numbers. If they match, the list `rel'
318 # holds the reversed list of components making up the relative file
318 # holds the reversed list of components making up the relative file
319 # name we want.
319 # name we want.
320 root_st = os.stat(root)
320 root_st = os.stat(root)
321 rel = []
321 rel = []
322 while True:
322 while True:
323 try:
323 try:
324 name_st = os.stat(name)
324 name_st = os.stat(name)
325 except OSError:
325 except OSError:
326 break
326 break
327 if samestat(name_st, root_st):
327 if samestat(name_st, root_st):
328 if not rel:
328 if not rel:
329 # name was actually the same as root (maybe a symlink)
329 # name was actually the same as root (maybe a symlink)
330 return ''
330 return ''
331 rel.reverse()
331 rel.reverse()
332 name = os.path.join(*rel)
332 name = os.path.join(*rel)
333 audit_path(name)
333 auditor(name)
334 return pconvert(name)
334 return pconvert(name)
335 dirname, basename = os.path.split(name)
335 dirname, basename = os.path.split(name)
336 rel.append(basename)
336 rel.append(basename)
337 if dirname == name:
337 if dirname == name:
338 break
338 break
339 name = dirname
339 name = dirname
340
340
341 raise Abort('%s not under root' % myname)
341 raise Abort('%s not under root' % myname)
342
342
343 _hgexecutable = None
343 _hgexecutable = None
344
344
345 def main_is_frozen():
345 def main_is_frozen():
346 """return True if we are a frozen executable.
346 """return True if we are a frozen executable.
347
347
348 The code supports py2exe (most common, Windows only) and tools/freeze
348 The code supports py2exe (most common, Windows only) and tools/freeze
349 (portable, not much used).
349 (portable, not much used).
350 """
350 """
351 return (hasattr(sys, "frozen") or # new py2exe
351 return (hasattr(sys, "frozen") or # new py2exe
352 hasattr(sys, "importers") or # old py2exe
352 hasattr(sys, "importers") or # old py2exe
353 imp.is_frozen("__main__")) # tools/freeze
353 imp.is_frozen("__main__")) # tools/freeze
354
354
355 def hgexecutable():
355 def hgexecutable():
356 """return location of the 'hg' executable.
356 """return location of the 'hg' executable.
357
357
358 Defaults to $HG or 'hg' in the search path.
358 Defaults to $HG or 'hg' in the search path.
359 """
359 """
360 if _hgexecutable is None:
360 if _hgexecutable is None:
361 hg = os.environ.get('HG')
361 hg = os.environ.get('HG')
362 if hg:
362 if hg:
363 set_hgexecutable(hg)
363 set_hgexecutable(hg)
364 elif main_is_frozen():
364 elif main_is_frozen():
365 set_hgexecutable(sys.executable)
365 set_hgexecutable(sys.executable)
366 else:
366 else:
367 exe = find_exe('hg') or os.path.basename(sys.argv[0])
367 exe = find_exe('hg') or os.path.basename(sys.argv[0])
368 set_hgexecutable(exe)
368 set_hgexecutable(exe)
369 return _hgexecutable
369 return _hgexecutable
370
370
371 def set_hgexecutable(path):
371 def set_hgexecutable(path):
372 """set location of the 'hg' executable"""
372 """set location of the 'hg' executable"""
373 global _hgexecutable
373 global _hgexecutable
374 _hgexecutable = path
374 _hgexecutable = path
375
375
376 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
376 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
377 '''enhanced shell command execution.
377 '''enhanced shell command execution.
378 run with environment maybe modified, maybe in different dir.
378 run with environment maybe modified, maybe in different dir.
379
379
380 if command fails and onerr is None, return status. if ui object,
380 if command fails and onerr is None, return status. if ui object,
381 print error message and return status, else raise onerr object as
381 print error message and return status, else raise onerr object as
382 exception.
382 exception.
383
383
384 if out is specified, it is assumed to be a file-like object that has a
384 if out is specified, it is assumed to be a file-like object that has a
385 write() method. stdout and stderr will be redirected to out.'''
385 write() method. stdout and stderr will be redirected to out.'''
386 def py2shell(val):
386 def py2shell(val):
387 'convert python object into string that is useful to shell'
387 'convert python object into string that is useful to shell'
388 if val is None or val is False:
388 if val is None or val is False:
389 return '0'
389 return '0'
390 if val is True:
390 if val is True:
391 return '1'
391 return '1'
392 return str(val)
392 return str(val)
393 origcmd = cmd
393 origcmd = cmd
394 if os.name == 'nt':
394 if os.name == 'nt':
395 cmd = '"%s"' % cmd
395 cmd = '"%s"' % cmd
396 env = dict(os.environ)
396 env = dict(os.environ)
397 env.update((k, py2shell(v)) for k, v in environ.iteritems())
397 env.update((k, py2shell(v)) for k, v in environ.iteritems())
398 env['HG'] = hgexecutable()
398 env['HG'] = hgexecutable()
399 if out is None:
399 if out is None:
400 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
400 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
401 env=env, cwd=cwd)
401 env=env, cwd=cwd)
402 else:
402 else:
403 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
403 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
404 env=env, cwd=cwd, stdout=subprocess.PIPE,
404 env=env, cwd=cwd, stdout=subprocess.PIPE,
405 stderr=subprocess.STDOUT)
405 stderr=subprocess.STDOUT)
406 for line in proc.stdout:
406 for line in proc.stdout:
407 out.write(line)
407 out.write(line)
408 proc.wait()
408 proc.wait()
409 rc = proc.returncode
409 rc = proc.returncode
410 if sys.platform == 'OpenVMS' and rc & 1:
410 if sys.platform == 'OpenVMS' and rc & 1:
411 rc = 0
411 rc = 0
412 if rc and onerr:
412 if rc and onerr:
413 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
413 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
414 explain_exit(rc)[0])
414 explain_exit(rc)[0])
415 if errprefix:
415 if errprefix:
416 errmsg = '%s: %s' % (errprefix, errmsg)
416 errmsg = '%s: %s' % (errprefix, errmsg)
417 try:
417 try:
418 onerr.warn(errmsg + '\n')
418 onerr.warn(errmsg + '\n')
419 except AttributeError:
419 except AttributeError:
420 raise onerr(errmsg)
420 raise onerr(errmsg)
421 return rc
421 return rc
422
422
423 def checksignature(func):
423 def checksignature(func):
424 '''wrap a function with code to check for calling errors'''
424 '''wrap a function with code to check for calling errors'''
425 def check(*args, **kwargs):
425 def check(*args, **kwargs):
426 try:
426 try:
427 return func(*args, **kwargs)
427 return func(*args, **kwargs)
428 except TypeError:
428 except TypeError:
429 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
429 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
430 raise error.SignatureError
430 raise error.SignatureError
431 raise
431 raise
432
432
433 return check
433 return check
434
434
435 def unlink(f):
435 def unlink(f):
436 """unlink and remove the directory if it is empty"""
436 """unlink and remove the directory if it is empty"""
437 os.unlink(f)
437 os.unlink(f)
438 # try removing directories that might now be empty
438 # try removing directories that might now be empty
439 try:
439 try:
440 os.removedirs(os.path.dirname(f))
440 os.removedirs(os.path.dirname(f))
441 except OSError:
441 except OSError:
442 pass
442 pass
443
443
444 def copyfile(src, dest):
444 def copyfile(src, dest):
445 "copy a file, preserving mode and atime/mtime"
445 "copy a file, preserving mode and atime/mtime"
446 if os.path.islink(src):
446 if os.path.islink(src):
447 try:
447 try:
448 os.unlink(dest)
448 os.unlink(dest)
449 except:
449 except:
450 pass
450 pass
451 os.symlink(os.readlink(src), dest)
451 os.symlink(os.readlink(src), dest)
452 else:
452 else:
453 try:
453 try:
454 shutil.copyfile(src, dest)
454 shutil.copyfile(src, dest)
455 shutil.copystat(src, dest)
455 shutil.copystat(src, dest)
456 except shutil.Error, inst:
456 except shutil.Error, inst:
457 raise Abort(str(inst))
457 raise Abort(str(inst))
458
458
459 def copyfiles(src, dst, hardlink=None):
459 def copyfiles(src, dst, hardlink=None):
460 """Copy a directory tree using hardlinks if possible"""
460 """Copy a directory tree using hardlinks if possible"""
461
461
462 if hardlink is None:
462 if hardlink is None:
463 hardlink = (os.stat(src).st_dev ==
463 hardlink = (os.stat(src).st_dev ==
464 os.stat(os.path.dirname(dst)).st_dev)
464 os.stat(os.path.dirname(dst)).st_dev)
465
465
466 num = 0
466 num = 0
467 if os.path.isdir(src):
467 if os.path.isdir(src):
468 os.mkdir(dst)
468 os.mkdir(dst)
469 for name, kind in osutil.listdir(src):
469 for name, kind in osutil.listdir(src):
470 srcname = os.path.join(src, name)
470 srcname = os.path.join(src, name)
471 dstname = os.path.join(dst, name)
471 dstname = os.path.join(dst, name)
472 hardlink, n = copyfiles(srcname, dstname, hardlink)
472 hardlink, n = copyfiles(srcname, dstname, hardlink)
473 num += n
473 num += n
474 else:
474 else:
475 if hardlink:
475 if hardlink:
476 try:
476 try:
477 os_link(src, dst)
477 os_link(src, dst)
478 except (IOError, OSError):
478 except (IOError, OSError):
479 hardlink = False
479 hardlink = False
480 shutil.copy(src, dst)
480 shutil.copy(src, dst)
481 else:
481 else:
482 shutil.copy(src, dst)
482 shutil.copy(src, dst)
483 num += 1
483 num += 1
484
484
485 return hardlink, num
485 return hardlink, num
486
486
487 class path_auditor(object):
487 class path_auditor(object):
488 '''ensure that a filesystem path contains no banned components.
488 '''ensure that a filesystem path contains no banned components.
489 the following properties of a path are checked:
489 the following properties of a path are checked:
490
490
491 - under top-level .hg
491 - under top-level .hg
492 - starts at the root of a windows drive
492 - starts at the root of a windows drive
493 - contains ".."
493 - contains ".."
494 - traverses a symlink (e.g. a/symlink_here/b)
494 - traverses a symlink (e.g. a/symlink_here/b)
495 - inside a nested repository'''
495 - inside a nested repository'''
496
496
497 def __init__(self, root):
497 def __init__(self, root):
498 self.audited = set()
498 self.audited = set()
499 self.auditeddir = set()
499 self.auditeddir = set()
500 self.root = root
500 self.root = root
501
501
502 def __call__(self, path):
502 def __call__(self, path):
503 if path in self.audited:
503 if path in self.audited:
504 return
504 return
505 normpath = os.path.normcase(path)
505 normpath = os.path.normcase(path)
506 parts = splitpath(normpath)
506 parts = splitpath(normpath)
507 if (os.path.splitdrive(path)[0]
507 if (os.path.splitdrive(path)[0]
508 or parts[0].lower() in ('.hg', '.hg.', '')
508 or parts[0].lower() in ('.hg', '.hg.', '')
509 or os.pardir in parts):
509 or os.pardir in parts):
510 raise Abort(_("path contains illegal component: %s") % path)
510 raise Abort(_("path contains illegal component: %s") % path)
511 if '.hg' in path.lower():
511 if '.hg' in path.lower():
512 lparts = [p.lower() for p in parts]
512 lparts = [p.lower() for p in parts]
513 for p in '.hg', '.hg.':
513 for p in '.hg', '.hg.':
514 if p in lparts[1:]:
514 if p in lparts[1:]:
515 pos = lparts.index(p)
515 pos = lparts.index(p)
516 base = os.path.join(*parts[:pos])
516 base = os.path.join(*parts[:pos])
517 raise Abort(_('path %r is inside repo %r') % (path, base))
517 raise Abort(_('path %r is inside repo %r') % (path, base))
518 def check(prefix):
518 def check(prefix):
519 curpath = os.path.join(self.root, prefix)
519 curpath = os.path.join(self.root, prefix)
520 try:
520 try:
521 st = os.lstat(curpath)
521 st = os.lstat(curpath)
522 except OSError, err:
522 except OSError, err:
523 # EINVAL can be raised as invalid path syntax under win32.
523 # EINVAL can be raised as invalid path syntax under win32.
524 # Such errors must be ignored because patterns can be checked too.
524 # Such errors must be ignored because patterns can be checked too.
525 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
525 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
526 raise
526 raise
527 else:
527 else:
528 if stat.S_ISLNK(st.st_mode):
528 if stat.S_ISLNK(st.st_mode):
529 raise Abort(_('path %r traverses symbolic link %r') %
529 raise Abort(_('path %r traverses symbolic link %r') %
530 (path, prefix))
530 (path, prefix))
531 elif (stat.S_ISDIR(st.st_mode) and
531 elif (stat.S_ISDIR(st.st_mode) and
532 os.path.isdir(os.path.join(curpath, '.hg'))):
532 os.path.isdir(os.path.join(curpath, '.hg'))):
533 raise Abort(_('path %r is inside repo %r') %
533 raise Abort(_('path %r is inside repo %r') %
534 (path, prefix))
534 (path, prefix))
535 parts.pop()
535 parts.pop()
536 prefixes = []
536 prefixes = []
537 while parts:
537 while parts:
538 prefix = os.sep.join(parts)
538 prefix = os.sep.join(parts)
539 if prefix in self.auditeddir:
539 if prefix in self.auditeddir:
540 break
540 break
541 check(prefix)
541 check(prefix)
542 prefixes.append(prefix)
542 prefixes.append(prefix)
543 parts.pop()
543 parts.pop()
544
544
545 self.audited.add(path)
545 self.audited.add(path)
546 # only add prefixes to the cache after checking everything: we don't
546 # only add prefixes to the cache after checking everything: we don't
547 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
547 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
548 self.auditeddir.update(prefixes)
548 self.auditeddir.update(prefixes)
549
549
550 def nlinks(pathname):
550 def nlinks(pathname):
551 """Return number of hardlinks for the given file."""
551 """Return number of hardlinks for the given file."""
552 return os.lstat(pathname).st_nlink
552 return os.lstat(pathname).st_nlink
553
553
554 if hasattr(os, 'link'):
554 if hasattr(os, 'link'):
555 os_link = os.link
555 os_link = os.link
556 else:
556 else:
557 def os_link(src, dst):
557 def os_link(src, dst):
558 raise OSError(0, _("Hardlinks not supported"))
558 raise OSError(0, _("Hardlinks not supported"))
559
559
560 def lookup_reg(key, name=None, scope=None):
560 def lookup_reg(key, name=None, scope=None):
561 return None
561 return None
562
562
563 def hidewindow():
563 def hidewindow():
564 """Hide current shell window.
564 """Hide current shell window.
565
565
566 Used to hide the window opened when starting asynchronous
566 Used to hide the window opened when starting asynchronous
567 child process under Windows, unneeded on other systems.
567 child process under Windows, unneeded on other systems.
568 """
568 """
569 pass
569 pass
570
570
571 if os.name == 'nt':
571 if os.name == 'nt':
572 from windows import *
572 from windows import *
573 else:
573 else:
574 from posix import *
574 from posix import *
575
575
576 def makelock(info, pathname):
576 def makelock(info, pathname):
577 try:
577 try:
578 return os.symlink(info, pathname)
578 return os.symlink(info, pathname)
579 except OSError, why:
579 except OSError, why:
580 if why.errno == errno.EEXIST:
580 if why.errno == errno.EEXIST:
581 raise
581 raise
582 except AttributeError: # no symlink in os
582 except AttributeError: # no symlink in os
583 pass
583 pass
584
584
585 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
585 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
586 os.write(ld, info)
586 os.write(ld, info)
587 os.close(ld)
587 os.close(ld)
588
588
589 def readlock(pathname):
589 def readlock(pathname):
590 try:
590 try:
591 return os.readlink(pathname)
591 return os.readlink(pathname)
592 except OSError, why:
592 except OSError, why:
593 if why.errno not in (errno.EINVAL, errno.ENOSYS):
593 if why.errno not in (errno.EINVAL, errno.ENOSYS):
594 raise
594 raise
595 except AttributeError: # no symlink in os
595 except AttributeError: # no symlink in os
596 pass
596 pass
597 return posixfile(pathname).read()
597 return posixfile(pathname).read()
598
598
599 def fstat(fp):
599 def fstat(fp):
600 '''stat file object that may not have fileno method.'''
600 '''stat file object that may not have fileno method.'''
601 try:
601 try:
602 return os.fstat(fp.fileno())
602 return os.fstat(fp.fileno())
603 except AttributeError:
603 except AttributeError:
604 return os.stat(fp.name)
604 return os.stat(fp.name)
605
605
606 # File system features
606 # File system features
607
607
608 def checkcase(path):
608 def checkcase(path):
609 """
609 """
610 Check whether the given path is on a case-sensitive filesystem
610 Check whether the given path is on a case-sensitive filesystem
611
611
612 Requires a path (like /foo/.hg) ending with a foldable final
612 Requires a path (like /foo/.hg) ending with a foldable final
613 directory component.
613 directory component.
614 """
614 """
615 s1 = os.stat(path)
615 s1 = os.stat(path)
616 d, b = os.path.split(path)
616 d, b = os.path.split(path)
617 p2 = os.path.join(d, b.upper())
617 p2 = os.path.join(d, b.upper())
618 if path == p2:
618 if path == p2:
619 p2 = os.path.join(d, b.lower())
619 p2 = os.path.join(d, b.lower())
620 try:
620 try:
621 s2 = os.stat(p2)
621 s2 = os.stat(p2)
622 if s2 == s1:
622 if s2 == s1:
623 return False
623 return False
624 return True
624 return True
625 except:
625 except:
626 return True
626 return True
627
627
628 _fspathcache = {}
628 _fspathcache = {}
629 def fspath(name, root):
629 def fspath(name, root):
630 '''Get name in the case stored in the filesystem
630 '''Get name in the case stored in the filesystem
631
631
632 The name is either relative to root, or it is an absolute path starting
632 The name is either relative to root, or it is an absolute path starting
633 with root. Note that this function is unnecessary, and should not be
633 with root. Note that this function is unnecessary, and should not be
634 called, for case-sensitive filesystems (simply because it's expensive).
634 called, for case-sensitive filesystems (simply because it's expensive).
635 '''
635 '''
636 # If name is absolute, make it relative
636 # If name is absolute, make it relative
637 if name.lower().startswith(root.lower()):
637 if name.lower().startswith(root.lower()):
638 l = len(root)
638 l = len(root)
639 if name[l] == os.sep or name[l] == os.altsep:
639 if name[l] == os.sep or name[l] == os.altsep:
640 l = l + 1
640 l = l + 1
641 name = name[l:]
641 name = name[l:]
642
642
643 if not os.path.exists(os.path.join(root, name)):
643 if not os.path.exists(os.path.join(root, name)):
644 return None
644 return None
645
645
646 seps = os.sep
646 seps = os.sep
647 if os.altsep:
647 if os.altsep:
648 seps = seps + os.altsep
648 seps = seps + os.altsep
649 # Protect backslashes. This gets silly very quickly.
649 # Protect backslashes. This gets silly very quickly.
650 seps.replace('\\','\\\\')
650 seps.replace('\\','\\\\')
651 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
651 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
652 dir = os.path.normcase(os.path.normpath(root))
652 dir = os.path.normcase(os.path.normpath(root))
653 result = []
653 result = []
654 for part, sep in pattern.findall(name):
654 for part, sep in pattern.findall(name):
655 if sep:
655 if sep:
656 result.append(sep)
656 result.append(sep)
657 continue
657 continue
658
658
659 if dir not in _fspathcache:
659 if dir not in _fspathcache:
660 _fspathcache[dir] = os.listdir(dir)
660 _fspathcache[dir] = os.listdir(dir)
661 contents = _fspathcache[dir]
661 contents = _fspathcache[dir]
662
662
663 lpart = part.lower()
663 lpart = part.lower()
664 lenp = len(part)
664 lenp = len(part)
665 for n in contents:
665 for n in contents:
666 if lenp == len(n) and n.lower() == lpart:
666 if lenp == len(n) and n.lower() == lpart:
667 result.append(n)
667 result.append(n)
668 break
668 break
669 else:
669 else:
670 # Cannot happen, as the file exists!
670 # Cannot happen, as the file exists!
671 result.append(part)
671 result.append(part)
672 dir = os.path.join(dir, lpart)
672 dir = os.path.join(dir, lpart)
673
673
674 return ''.join(result)
674 return ''.join(result)
675
675
676 def checkexec(path):
676 def checkexec(path):
677 """
677 """
678 Check whether the given path is on a filesystem with UNIX-like exec flags
678 Check whether the given path is on a filesystem with UNIX-like exec flags
679
679
680 Requires a directory (like /foo/.hg)
680 Requires a directory (like /foo/.hg)
681 """
681 """
682
682
683 # VFAT on some Linux versions can flip mode but it doesn't persist
683 # VFAT on some Linux versions can flip mode but it doesn't persist
684 # a FS remount. Frequently we can detect it if files are created
684 # a FS remount. Frequently we can detect it if files are created
685 # with exec bit on.
685 # with exec bit on.
686
686
687 try:
687 try:
688 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
688 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
689 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
689 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
690 try:
690 try:
691 os.close(fh)
691 os.close(fh)
692 m = os.stat(fn).st_mode & 0777
692 m = os.stat(fn).st_mode & 0777
693 new_file_has_exec = m & EXECFLAGS
693 new_file_has_exec = m & EXECFLAGS
694 os.chmod(fn, m ^ EXECFLAGS)
694 os.chmod(fn, m ^ EXECFLAGS)
695 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
695 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
696 finally:
696 finally:
697 os.unlink(fn)
697 os.unlink(fn)
698 except (IOError, OSError):
698 except (IOError, OSError):
699 # we don't care, the user probably won't be able to commit anyway
699 # we don't care, the user probably won't be able to commit anyway
700 return False
700 return False
701 return not (new_file_has_exec or exec_flags_cannot_flip)
701 return not (new_file_has_exec or exec_flags_cannot_flip)
702
702
703 def checklink(path):
703 def checklink(path):
704 """check whether the given path is on a symlink-capable filesystem"""
704 """check whether the given path is on a symlink-capable filesystem"""
705 # mktemp is not racy because symlink creation will fail if the
705 # mktemp is not racy because symlink creation will fail if the
706 # file already exists
706 # file already exists
707 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
707 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
708 try:
708 try:
709 os.symlink(".", name)
709 os.symlink(".", name)
710 os.unlink(name)
710 os.unlink(name)
711 return True
711 return True
712 except (OSError, AttributeError):
712 except (OSError, AttributeError):
713 return False
713 return False
714
714
715 def needbinarypatch():
715 def needbinarypatch():
716 """return True if patches should be applied in binary mode by default."""
716 """return True if patches should be applied in binary mode by default."""
717 return os.name == 'nt'
717 return os.name == 'nt'
718
718
719 def endswithsep(path):
719 def endswithsep(path):
720 '''Check path ends with os.sep or os.altsep.'''
720 '''Check path ends with os.sep or os.altsep.'''
721 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
721 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
722
722
723 def splitpath(path):
723 def splitpath(path):
724 '''Split path by os.sep.
724 '''Split path by os.sep.
725 Note that this function does not use os.altsep because this is
725 Note that this function does not use os.altsep because this is
726 an alternative to a simple "xxx.split(os.sep)".
726 an alternative to a simple "xxx.split(os.sep)".
727 It is recommended to use os.path.normpath() before using this
727 It is recommended to use os.path.normpath() before using this
728 function if needed.'''
728 function if needed.'''
729 return path.split(os.sep)
729 return path.split(os.sep)
730
730
731 def gui():
731 def gui():
732 '''Are we running in a GUI?'''
732 '''Are we running in a GUI?'''
733 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
733 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
734
734
735 def mktempcopy(name, emptyok=False, createmode=None):
735 def mktempcopy(name, emptyok=False, createmode=None):
736 """Create a temporary file with the same contents from name
736 """Create a temporary file with the same contents from name
737
737
738 The permission bits are copied from the original file.
738 The permission bits are copied from the original file.
739
739
740 If the temporary file is going to be truncated immediately, you
740 If the temporary file is going to be truncated immediately, you
741 can use emptyok=True as an optimization.
741 can use emptyok=True as an optimization.
742
742
743 Returns the name of the temporary file.
743 Returns the name of the temporary file.
744 """
744 """
745 d, fn = os.path.split(name)
745 d, fn = os.path.split(name)
746 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
746 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
747 os.close(fd)
747 os.close(fd)
748 # Temporary files are created with mode 0600, which is usually not
748 # Temporary files are created with mode 0600, which is usually not
749 # what we want. If the original file already exists, just copy
749 # what we want. If the original file already exists, just copy
750 # its mode. Otherwise, manually obey umask.
750 # its mode. Otherwise, manually obey umask.
751 try:
751 try:
752 st_mode = os.lstat(name).st_mode & 0777
752 st_mode = os.lstat(name).st_mode & 0777
753 except OSError, inst:
753 except OSError, inst:
754 if inst.errno != errno.ENOENT:
754 if inst.errno != errno.ENOENT:
755 raise
755 raise
756 st_mode = createmode
756 st_mode = createmode
757 if st_mode is None:
757 if st_mode is None:
758 st_mode = ~umask
758 st_mode = ~umask
759 st_mode &= 0666
759 st_mode &= 0666
760 os.chmod(temp, st_mode)
760 os.chmod(temp, st_mode)
761 if emptyok:
761 if emptyok:
762 return temp
762 return temp
763 try:
763 try:
764 try:
764 try:
765 ifp = posixfile(name, "rb")
765 ifp = posixfile(name, "rb")
766 except IOError, inst:
766 except IOError, inst:
767 if inst.errno == errno.ENOENT:
767 if inst.errno == errno.ENOENT:
768 return temp
768 return temp
769 if not getattr(inst, 'filename', None):
769 if not getattr(inst, 'filename', None):
770 inst.filename = name
770 inst.filename = name
771 raise
771 raise
772 ofp = posixfile(temp, "wb")
772 ofp = posixfile(temp, "wb")
773 for chunk in filechunkiter(ifp):
773 for chunk in filechunkiter(ifp):
774 ofp.write(chunk)
774 ofp.write(chunk)
775 ifp.close()
775 ifp.close()
776 ofp.close()
776 ofp.close()
777 except:
777 except:
778 try: os.unlink(temp)
778 try: os.unlink(temp)
779 except: pass
779 except: pass
780 raise
780 raise
781 return temp
781 return temp
782
782
783 class atomictempfile(object):
783 class atomictempfile(object):
784 """file-like object that atomically updates a file
784 """file-like object that atomically updates a file
785
785
786 All writes will be redirected to a temporary copy of the original
786 All writes will be redirected to a temporary copy of the original
787 file. When rename is called, the copy is renamed to the original
787 file. When rename is called, the copy is renamed to the original
788 name, making the changes visible.
788 name, making the changes visible.
789 """
789 """
790 def __init__(self, name, mode='w+b', createmode=None):
790 def __init__(self, name, mode='w+b', createmode=None):
791 self.__name = name
791 self.__name = name
792 self._fp = None
792 self._fp = None
793 self.temp = mktempcopy(name, emptyok=('w' in mode),
793 self.temp = mktempcopy(name, emptyok=('w' in mode),
794 createmode=createmode)
794 createmode=createmode)
795 self._fp = posixfile(self.temp, mode)
795 self._fp = posixfile(self.temp, mode)
796
796
797 def __getattr__(self, name):
797 def __getattr__(self, name):
798 return getattr(self._fp, name)
798 return getattr(self._fp, name)
799
799
800 def rename(self):
800 def rename(self):
801 if not self._fp.closed:
801 if not self._fp.closed:
802 self._fp.close()
802 self._fp.close()
803 rename(self.temp, localpath(self.__name))
803 rename(self.temp, localpath(self.__name))
804
804
805 def __del__(self):
805 def __del__(self):
806 if not self._fp:
806 if not self._fp:
807 return
807 return
808 if not self._fp.closed:
808 if not self._fp.closed:
809 try:
809 try:
810 os.unlink(self.temp)
810 os.unlink(self.temp)
811 except: pass
811 except: pass
812 self._fp.close()
812 self._fp.close()
813
813
814 def makedirs(name, mode=None):
814 def makedirs(name, mode=None):
815 """recursive directory creation with parent mode inheritance"""
815 """recursive directory creation with parent mode inheritance"""
816 try:
816 try:
817 os.mkdir(name)
817 os.mkdir(name)
818 if mode is not None:
818 if mode is not None:
819 os.chmod(name, mode)
819 os.chmod(name, mode)
820 return
820 return
821 except OSError, err:
821 except OSError, err:
822 if err.errno == errno.EEXIST:
822 if err.errno == errno.EEXIST:
823 return
823 return
824 if err.errno != errno.ENOENT:
824 if err.errno != errno.ENOENT:
825 raise
825 raise
826 parent = os.path.abspath(os.path.dirname(name))
826 parent = os.path.abspath(os.path.dirname(name))
827 makedirs(parent, mode)
827 makedirs(parent, mode)
828 makedirs(name, mode)
828 makedirs(name, mode)
829
829
830 class opener(object):
830 class opener(object):
831 """Open files relative to a base directory
831 """Open files relative to a base directory
832
832
833 This class is used to hide the details of COW semantics and
833 This class is used to hide the details of COW semantics and
834 remote file access from higher level code.
834 remote file access from higher level code.
835 """
835 """
836 def __init__(self, base, audit=True):
836 def __init__(self, base, audit=True):
837 self.base = base
837 self.base = base
838 if audit:
838 if audit:
839 self.audit_path = path_auditor(base)
839 self.auditor = path_auditor(base)
840 else:
840 else:
841 self.audit_path = always
841 self.auditor = always
842 self.createmode = None
842 self.createmode = None
843
843
844 @propertycache
844 @propertycache
845 def _can_symlink(self):
845 def _can_symlink(self):
846 return checklink(self.base)
846 return checklink(self.base)
847
847
848 def _fixfilemode(self, name):
848 def _fixfilemode(self, name):
849 if self.createmode is None:
849 if self.createmode is None:
850 return
850 return
851 os.chmod(name, self.createmode & 0666)
851 os.chmod(name, self.createmode & 0666)
852
852
853 def __call__(self, path, mode="r", text=False, atomictemp=False):
853 def __call__(self, path, mode="r", text=False, atomictemp=False):
854 self.audit_path(path)
854 self.auditor(path)
855 f = os.path.join(self.base, path)
855 f = os.path.join(self.base, path)
856
856
857 if not text and "b" not in mode:
857 if not text and "b" not in mode:
858 mode += "b" # for that other OS
858 mode += "b" # for that other OS
859
859
860 nlink = -1
860 nlink = -1
861 if mode not in ("r", "rb"):
861 if mode not in ("r", "rb"):
862 try:
862 try:
863 nlink = nlinks(f)
863 nlink = nlinks(f)
864 except OSError:
864 except OSError:
865 nlink = 0
865 nlink = 0
866 d = os.path.dirname(f)
866 d = os.path.dirname(f)
867 if not os.path.isdir(d):
867 if not os.path.isdir(d):
868 makedirs(d, self.createmode)
868 makedirs(d, self.createmode)
869 if atomictemp:
869 if atomictemp:
870 return atomictempfile(f, mode, self.createmode)
870 return atomictempfile(f, mode, self.createmode)
871 if nlink > 1:
871 if nlink > 1:
872 rename(mktempcopy(f), f)
872 rename(mktempcopy(f), f)
873 fp = posixfile(f, mode)
873 fp = posixfile(f, mode)
874 if nlink == 0:
874 if nlink == 0:
875 self._fixfilemode(f)
875 self._fixfilemode(f)
876 return fp
876 return fp
877
877
878 def symlink(self, src, dst):
878 def symlink(self, src, dst):
879 self.audit_path(dst)
879 self.auditor(dst)
880 linkname = os.path.join(self.base, dst)
880 linkname = os.path.join(self.base, dst)
881 try:
881 try:
882 os.unlink(linkname)
882 os.unlink(linkname)
883 except OSError:
883 except OSError:
884 pass
884 pass
885
885
886 dirname = os.path.dirname(linkname)
886 dirname = os.path.dirname(linkname)
887 if not os.path.exists(dirname):
887 if not os.path.exists(dirname):
888 makedirs(dirname, self.createmode)
888 makedirs(dirname, self.createmode)
889
889
890 if self._can_symlink:
890 if self._can_symlink:
891 try:
891 try:
892 os.symlink(src, linkname)
892 os.symlink(src, linkname)
893 except OSError, err:
893 except OSError, err:
894 raise OSError(err.errno, _('could not symlink to %r: %s') %
894 raise OSError(err.errno, _('could not symlink to %r: %s') %
895 (src, err.strerror), linkname)
895 (src, err.strerror), linkname)
896 else:
896 else:
897 f = self(dst, "w")
897 f = self(dst, "w")
898 f.write(src)
898 f.write(src)
899 f.close()
899 f.close()
900 self._fixfilemode(dst)
900 self._fixfilemode(dst)
901
901
902 class chunkbuffer(object):
902 class chunkbuffer(object):
903 """Allow arbitrary sized chunks of data to be efficiently read from an
903 """Allow arbitrary sized chunks of data to be efficiently read from an
904 iterator over chunks of arbitrary size."""
904 iterator over chunks of arbitrary size."""
905
905
906 def __init__(self, in_iter):
906 def __init__(self, in_iter):
907 """in_iter is the iterator that's iterating over the input chunks.
907 """in_iter is the iterator that's iterating over the input chunks.
908 targetsize is how big a buffer to try to maintain."""
908 targetsize is how big a buffer to try to maintain."""
909 def splitbig(chunks):
909 def splitbig(chunks):
910 for chunk in chunks:
910 for chunk in chunks:
911 if len(chunk) > 2**20:
911 if len(chunk) > 2**20:
912 pos = 0
912 pos = 0
913 while pos < len(chunk):
913 while pos < len(chunk):
914 end = pos + 2 ** 18
914 end = pos + 2 ** 18
915 yield chunk[pos:end]
915 yield chunk[pos:end]
916 pos = end
916 pos = end
917 else:
917 else:
918 yield chunk
918 yield chunk
919 self.iter = splitbig(in_iter)
919 self.iter = splitbig(in_iter)
920 self._queue = []
920 self._queue = []
921
921
922 def read(self, l):
922 def read(self, l):
923 """Read L bytes of data from the iterator of chunks of data.
923 """Read L bytes of data from the iterator of chunks of data.
924 Returns less than L bytes if the iterator runs dry."""
924 Returns less than L bytes if the iterator runs dry."""
925 left = l
925 left = l
926 buf = ''
926 buf = ''
927 queue = self._queue
927 queue = self._queue
928 while left > 0:
928 while left > 0:
929 # refill the queue
929 # refill the queue
930 if not queue:
930 if not queue:
931 target = 2**18
931 target = 2**18
932 for chunk in self.iter:
932 for chunk in self.iter:
933 queue.append(chunk)
933 queue.append(chunk)
934 target -= len(chunk)
934 target -= len(chunk)
935 if target <= 0:
935 if target <= 0:
936 break
936 break
937 if not queue:
937 if not queue:
938 break
938 break
939
939
940 chunk = queue.pop(0)
940 chunk = queue.pop(0)
941 left -= len(chunk)
941 left -= len(chunk)
942 if left < 0:
942 if left < 0:
943 queue.insert(0, chunk[left:])
943 queue.insert(0, chunk[left:])
944 buf += chunk[:left]
944 buf += chunk[:left]
945 else:
945 else:
946 buf += chunk
946 buf += chunk
947
947
948 return buf
948 return buf
949
949
950 def filechunkiter(f, size=65536, limit=None):
950 def filechunkiter(f, size=65536, limit=None):
951 """Create a generator that produces the data in the file size
951 """Create a generator that produces the data in the file size
952 (default 65536) bytes at a time, up to optional limit (default is
952 (default 65536) bytes at a time, up to optional limit (default is
953 to read all data). Chunks may be less than size bytes if the
953 to read all data). Chunks may be less than size bytes if the
954 chunk is the last chunk in the file, or the file is a socket or
954 chunk is the last chunk in the file, or the file is a socket or
955 some other type of file that sometimes reads less data than is
955 some other type of file that sometimes reads less data than is
956 requested."""
956 requested."""
957 assert size >= 0
957 assert size >= 0
958 assert limit is None or limit >= 0
958 assert limit is None or limit >= 0
959 while True:
959 while True:
960 if limit is None:
960 if limit is None:
961 nbytes = size
961 nbytes = size
962 else:
962 else:
963 nbytes = min(limit, size)
963 nbytes = min(limit, size)
964 s = nbytes and f.read(nbytes)
964 s = nbytes and f.read(nbytes)
965 if not s:
965 if not s:
966 break
966 break
967 if limit:
967 if limit:
968 limit -= len(s)
968 limit -= len(s)
969 yield s
969 yield s
970
970
971 def makedate():
971 def makedate():
972 lt = time.localtime()
972 lt = time.localtime()
973 if lt[8] == 1 and time.daylight:
973 if lt[8] == 1 and time.daylight:
974 tz = time.altzone
974 tz = time.altzone
975 else:
975 else:
976 tz = time.timezone
976 tz = time.timezone
977 return time.mktime(lt), tz
977 return time.mktime(lt), tz
978
978
979 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
979 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
980 """represent a (unixtime, offset) tuple as a localized time.
980 """represent a (unixtime, offset) tuple as a localized time.
981 unixtime is seconds since the epoch, and offset is the time zone's
981 unixtime is seconds since the epoch, and offset is the time zone's
982 number of seconds away from UTC. if timezone is false, do not
982 number of seconds away from UTC. if timezone is false, do not
983 append time zone to string."""
983 append time zone to string."""
984 t, tz = date or makedate()
984 t, tz = date or makedate()
985 if "%1" in format or "%2" in format:
985 if "%1" in format or "%2" in format:
986 sign = (tz > 0) and "-" or "+"
986 sign = (tz > 0) and "-" or "+"
987 minutes = abs(tz) // 60
987 minutes = abs(tz) // 60
988 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
988 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
989 format = format.replace("%2", "%02d" % (minutes % 60))
989 format = format.replace("%2", "%02d" % (minutes % 60))
990 s = time.strftime(format, time.gmtime(float(t) - tz))
990 s = time.strftime(format, time.gmtime(float(t) - tz))
991 return s
991 return s
992
992
993 def shortdate(date=None):
993 def shortdate(date=None):
994 """turn (timestamp, tzoff) tuple into iso 8631 date."""
994 """turn (timestamp, tzoff) tuple into iso 8631 date."""
995 return datestr(date, format='%Y-%m-%d')
995 return datestr(date, format='%Y-%m-%d')
996
996
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

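# Illustrative usage (not part of util.py): parsedate() accepts either a
# raw "unixtime offset" pair or any format in defaultdateformats (defined
# earlier in this module). Formats without an explicit zone depend on the
# local time zone, so only zone-carrying inputs have fixed results:
#
#   >>> parsedate('0 0')
#   (0, 0)
#   >>> parsedate('2006-02-06 13:00:30 +0100')
#   (1139227230, -3600)
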
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within a given number of days of today

    '{date} to {date}' a date range, inclusive
    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

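# Illustrative usage (not part of util.py): matchdate() compiles a date
# specifier into a predicate over unixtime values. The cut-offs are
# computed in the local time zone, so no fixed output is shown:
#
#   dm = matchdate('>2010-01-01')
#   dm(parsedate('2010-06-01')[0])   # True
#   dm(parsedate('2009-06-01')[0])   # False
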
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

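# Illustrative usage (not part of util.py):
#
#   >>> shortuser('John Doe <john.doe@example.com>')
#   'john'
#   >>> shortuser('jdoe@example.com')
#   'jdoe'
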
def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1:
        r = None
    return author[author.find('<') + 1:r]

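# Illustrative usage (not part of util.py):
#
#   >>> email('John Doe <jdoe@example.com>')
#   'jdoe@example.com'
#   >>> email('jdoe@example.com')
#   'jdoe@example.com'
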
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength - 3])

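# Illustrative usage (not part of util.py): longer strings are truncated
# and an ASCII ellipsis is appended within the limit:
#
#   >>> ellipsis('abcdefghij', 8)
#   'abcde...'
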
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

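# Illustrative usage (not part of util.py; /srv/repos is a hypothetical
# path): list every repository below a directory, following symlinks and
# descending into repositories that contain nested ones:
#
#   for repo in walkrepos('/srv/repos', followsym=True, recurse=True):
#       print repo
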
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

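# Illustrative usage (not part of util.py; the paths are hypothetical):
# with HGRCPATH set before the first call, only the listed files (or the
# *.rc files of listed directories) are searched, and the result is
# cached in _rcpath:
#
#   os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/hgrc.d',
#                                             '/home/alice/.hgrc'])
#   rcpath()   # the .rc files under hgrc.d, then /home/alice/.hgrc
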
def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

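# Illustrative usage (not part of util.py, assuming no active translation
# of the unit strings): the first unit whose threshold fits is used, with
# more decimals for smaller magnitudes:
#
#   >>> bytecount(512)
#   '512 bytes'
#   >>> bytecount(12345)
#   '12.1 KB'
#   >>> bytecount(1048576)
#   '1.00 MB'
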
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            if scheme == 'file':
                i = path.find('/', 2)
                if i == -1:
                    return ''
                # On Windows, absolute paths are rooted at the current drive
                # root. On POSIX they are rooted at the file system root.
                if os.name == 'nt':
                    droot = os.path.splitdrive(os.getcwd())[0] + '/'
                    path = os.path.join(droot, path[i + 1:])
                else:
                    path = path[i:]
            else:
                path = path[2:]
    return path

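# Illustrative usage (not part of util.py): file:// URLs keep their
# rootedness; the first result below is for a POSIX system (on Windows
# the path is re-rooted at the current drive):
#
#   >>> drop_scheme('file', 'file:///tmp/repo')
#   '/tmp/repo'
#   >>> drop_scheme('http', 'http://example.com/repo')
#   'example.com/repo'
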
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

#### naming convention of below implementation follows 'textwrap' module

class MBTextWrapper(textwrap.TextWrapper):
    def __init__(self, **kwargs):
        textwrap.TextWrapper.__init__(self, **kwargs)

    def _cutdown(self, str, space_left):
        l = 0
        ucstr = unicode(str, encoding.encoding)
        w = unicodedata.east_asian_width
        for i in xrange(len(ucstr)):
            l += w(ucstr[i]) in 'WFA' and 2 or 1
            if space_left < l:
                return (ucstr[:i].encode(encoding.encoding),
                        ucstr[i:].encode(encoding.encoding))
        return str, ''

    # ----------------------------------------
    # overriding of base class

    def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
        space_left = max(width - cur_len, 1)

        if self.break_long_words:
            cut, res = self._cutdown(reversed_chunks[-1], space_left)
            cur_line.append(cut)
            reversed_chunks[-1] = res
        elif not cur_line:
            cur_line.append(reversed_chunks.pop())

#### naming convention of above implementation follows 'textwrap' module

def wrap(line, width=None, initindent='', hangindent=''):
    if width is None:
        width = termwidth() - 2
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)

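# Illustrative usage (not part of util.py): wrap() fills one logical line
# to a given width (terminal width minus 2 by default) and is multi-byte
# aware via MBTextWrapper; hangindent prefixes continuation lines, e.g.
# for option descriptions:
#
#   wrap('show changed files in the working directory', width=30,
#        initindent='  status  ', hangindent='          ')
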
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute the current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or the current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # The Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a
    # long-running process on success. Instead we listen for SIGCHLD
    # telling us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    return termwidth_()

def interpolate(prefix, mapping, s, fn=None):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.
    """
    fn = fn or (lambda s: s)
    r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys())))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

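# Illustrative usage (not part of util.py): a plain prefix and a
# regex-escaped one; fn post-processes each replacement:
#
#   >>> interpolate('%', {'user': 'alice'}, 'hi %user')
#   'hi alice'
#   >>> interpolate(r'\$', {'user': 'alice'}, 'hi $user', fn=str.upper)
#   'hi ALICE'
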
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
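
# Illustrative usage (not part of util.py): integers and numeric strings
# pass through; service names go through socket.getservbyname(), so the
# result depends on the local services database:
#
#   >>> getport(8000)
#   8000
#   >>> getport('8000')
#   8000
#   >>> getport('http')   # usually 80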