util: Fix date format for 12-hour time.
Carey Evans
r9383:7116494c default
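The change in this revision is to the defaultdateformats table below: one of the 12-hour patterns ended in a literal 'P' ('%I:%M:%SP') instead of the strptime AM/PM directive, so an input such as '1:02:03PM' never matched. A minimal illustration of the difference, using the standard time module directly (hypothetical input string; assumes a locale where %p recognizes 'AM'/'PM'):

    import time

    # Old entry: the trailing "P" is a literal character, so '1:02:03PM'
    # leaves an unconverted 'M' behind and strptime raises ValueError.
    try:
        time.strptime('1:02:03PM', '%I:%M:%SP')
    except ValueError:
        pass

    # Fixed entry: %p consumes the AM/PM marker, and %I plus %p yield the
    # 24-hour value.
    assert time.strptime('1:02:03PM', '%I:%M:%S%p').tm_hour == 13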
@@ -1,1284 +1,1284 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, random, textwrap
19 import os, stat, time, calendar, random, textwrap
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 def popen2(cmd):
41 def popen2(cmd):
42 # Setting bufsize to -1 lets the system decide the buffer size.
42 # Setting bufsize to -1 lets the system decide the buffer size.
43 # The default for bufsize is 0, meaning unbuffered. This leads to
43 # The default for bufsize is 0, meaning unbuffered. This leads to
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
44 # poor performance on Mac OS X: http://bugs.python.org/issue4194
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
45 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
46 close_fds=closefds,
46 close_fds=closefds,
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
47 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
48 return p.stdin, p.stdout
48 return p.stdin, p.stdout
49 def popen3(cmd):
49 def popen3(cmd):
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
50 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
51 close_fds=closefds,
51 close_fds=closefds,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
52 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
53 stderr=subprocess.PIPE)
53 stderr=subprocess.PIPE)
54 return p.stdin, p.stdout, p.stderr
54 return p.stdin, p.stdout, p.stderr
55
55
56 def version():
56 def version():
57 """Return version information if available."""
57 """Return version information if available."""
58 try:
58 try:
59 import __version__
59 import __version__
60 return __version__.version
60 return __version__.version
61 except ImportError:
61 except ImportError:
62 return 'unknown'
62 return 'unknown'
63
63
64 # used by parsedate
64 # used by parsedate
65 defaultdateformats = (
65 defaultdateformats = (
66 '%Y-%m-%d %H:%M:%S',
66 '%Y-%m-%d %H:%M:%S',
67 '%Y-%m-%d %I:%M:%S%p',
67 '%Y-%m-%d %I:%M:%S%p',
68 '%Y-%m-%d %H:%M',
68 '%Y-%m-%d %H:%M',
69 '%Y-%m-%d %I:%M%p',
69 '%Y-%m-%d %I:%M%p',
70 '%Y-%m-%d',
70 '%Y-%m-%d',
71 '%m-%d',
71 '%m-%d',
72 '%m/%d',
72 '%m/%d',
73 '%m/%d/%y',
73 '%m/%d/%y',
74 '%m/%d/%Y',
74 '%m/%d/%Y',
75 '%a %b %d %H:%M:%S %Y',
75 '%a %b %d %H:%M:%S %Y',
76 '%a %b %d %I:%M:%S%p %Y',
76 '%a %b %d %I:%M:%S%p %Y',
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
77 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
78 '%b %d %H:%M:%S %Y',
78 '%b %d %H:%M:%S %Y',
79 '%b %d %I:%M:%S%p %Y',
79 '%b %d %I:%M:%S%p %Y',
80 '%b %d %H:%M:%S',
80 '%b %d %H:%M:%S',
81 '%b %d %I:%M:%S%p',
81 '%b %d %I:%M:%S%p',
82 '%b %d %H:%M',
82 '%b %d %H:%M',
83 '%b %d %I:%M%p',
83 '%b %d %I:%M%p',
84 '%b %d %Y',
84 '%b %d %Y',
85 '%b %d',
85 '%b %d',
86 '%H:%M:%S',
86 '%H:%M:%S',
87 '%I:%M:%SP',
87 '%I:%M:%S%p',
88 '%H:%M',
88 '%H:%M',
89 '%I:%M%p',
89 '%I:%M%p',
90 )
90 )
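These patterns are consumed by parsedate further down in this file, which tries them one at a time via strdate and time.strptime. A rough sketch of that lookup, stripped of the timezone and default-filling logic in the real code (the helper name is purely illustrative and assumes the defaultdateformats tuple above):

    import time

    def firstmatch(s, formats=defaultdateformats):
        # Return the first struct_time produced by any pattern in the
        # table, or None if nothing matches.
        for fmt in formats:
            try:
                return time.strptime(s, fmt)
            except ValueError:
                continue
        return None

    firstmatch('1:02:03PM')    # now matches the corrected '%I:%M:%S%p' entry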
91
91
92 extendeddateformats = defaultdateformats + (
92 extendeddateformats = defaultdateformats + (
93 "%Y",
93 "%Y",
94 "%Y-%m",
94 "%Y-%m",
95 "%b",
95 "%b",
96 "%b %Y",
96 "%b %Y",
97 )
97 )
98
98
99 def cachefunc(func):
99 def cachefunc(func):
100 '''cache the result of function calls'''
100 '''cache the result of function calls'''
101 # XXX doesn't handle keyword args
101 # XXX doesn't handle keyword args
102 cache = {}
102 cache = {}
103 if func.func_code.co_argcount == 1:
103 if func.func_code.co_argcount == 1:
104 # we gain a small amount of time because
104 # we gain a small amount of time because
105 # we don't need to pack/unpack the list
105 # we don't need to pack/unpack the list
106 def f(arg):
106 def f(arg):
107 if arg not in cache:
107 if arg not in cache:
108 cache[arg] = func(arg)
108 cache[arg] = func(arg)
109 return cache[arg]
109 return cache[arg]
110 else:
110 else:
111 def f(*args):
111 def f(*args):
112 if args not in cache:
112 if args not in cache:
113 cache[args] = func(*args)
113 cache[args] = func(*args)
114 return cache[args]
114 return cache[args]
115
115
116 return f
116 return f
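cachefunc's docstring is terse, so a small usage sketch may help; the names below are illustrative only and assume the cachefunc defined above (keyword arguments are not supported, per the XXX note):

    def fib(n):
        if n < 2:
            return n
        return fib(n - 1) + fib(n - 2)

    # Rebinding the name makes the recursive calls go through the cache too.
    fib = cachefunc(fib)
    fib(30)   # each subproblem is computed once, then served from the dict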
117
117
118 def lrucachefunc(func):
118 def lrucachefunc(func):
119 '''cache most recent results of function calls'''
119 '''cache most recent results of function calls'''
120 cache = {}
120 cache = {}
121 order = []
121 order = []
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 def f(arg):
123 def f(arg):
124 if arg not in cache:
124 if arg not in cache:
125 if len(cache) > 20:
125 if len(cache) > 20:
126 del cache[order.pop(0)]
126 del cache[order.pop(0)]
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 else:
128 else:
129 order.remove(arg)
129 order.remove(arg)
130 order.append(arg)
130 order.append(arg)
131 return cache[arg]
131 return cache[arg]
132 else:
132 else:
133 def f(*args):
133 def f(*args):
134 if args not in cache:
134 if args not in cache:
135 if len(cache) > 20:
135 if len(cache) > 20:
136 del cache[order.pop(0)]
136 del cache[order.pop(0)]
137 cache[args] = func(*args)
137 cache[args] = func(*args)
138 else:
138 else:
139 order.remove(args)
139 order.remove(args)
140 order.append(args)
140 order.append(args)
141 return cache[args]
141 return cache[args]
142
142
143 return f
143 return f
144
144
145 class propertycache(object):
145 class propertycache(object):
146 def __init__(self, func):
146 def __init__(self, func):
147 self.func = func
147 self.func = func
148 self.name = func.__name__
148 self.name = func.__name__
149 def __get__(self, obj, type=None):
149 def __get__(self, obj, type=None):
150 result = self.func(obj)
150 result = self.func(obj)
151 setattr(obj, self.name, result)
151 setattr(obj, self.name, result)
152 return result
152 return result
153
153
154 def pipefilter(s, cmd):
154 def pipefilter(s, cmd):
155 '''filter string S through command CMD, returning its output'''
155 '''filter string S through command CMD, returning its output'''
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
156 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
157 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
158 pout, perr = p.communicate(s)
158 pout, perr = p.communicate(s)
159 return pout
159 return pout
160
160
161 def tempfilter(s, cmd):
161 def tempfilter(s, cmd):
162 '''filter string S through a pair of temporary files with CMD.
162 '''filter string S through a pair of temporary files with CMD.
163 CMD is used as a template to create the real command to be run,
163 CMD is used as a template to create the real command to be run,
164 with the strings INFILE and OUTFILE replaced by the real names of
164 with the strings INFILE and OUTFILE replaced by the real names of
165 the temporary files generated.'''
165 the temporary files generated.'''
166 inname, outname = None, None
166 inname, outname = None, None
167 try:
167 try:
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
168 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
169 fp = os.fdopen(infd, 'wb')
169 fp = os.fdopen(infd, 'wb')
170 fp.write(s)
170 fp.write(s)
171 fp.close()
171 fp.close()
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
172 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
173 os.close(outfd)
173 os.close(outfd)
174 cmd = cmd.replace('INFILE', inname)
174 cmd = cmd.replace('INFILE', inname)
175 cmd = cmd.replace('OUTFILE', outname)
175 cmd = cmd.replace('OUTFILE', outname)
176 code = os.system(cmd)
176 code = os.system(cmd)
177 if sys.platform == 'OpenVMS' and code & 1:
177 if sys.platform == 'OpenVMS' and code & 1:
178 code = 0
178 code = 0
179 if code: raise Abort(_("command '%s' failed: %s") %
179 if code: raise Abort(_("command '%s' failed: %s") %
180 (cmd, explain_exit(code)))
180 (cmd, explain_exit(code)))
181 return open(outname, 'rb').read()
181 return open(outname, 'rb').read()
182 finally:
182 finally:
183 try:
183 try:
184 if inname: os.unlink(inname)
184 if inname: os.unlink(inname)
185 except: pass
185 except: pass
186 try:
186 try:
187 if outname: os.unlink(outname)
187 if outname: os.unlink(outname)
188 except: pass
188 except: pass
189
189
190 filtertable = {
190 filtertable = {
191 'tempfile:': tempfilter,
191 'tempfile:': tempfilter,
192 'pipe:': pipefilter,
192 'pipe:': pipefilter,
193 }
193 }
194
194
195 def filter(s, cmd):
195 def filter(s, cmd):
196 "filter a string through a command that transforms its input to its output"
196 "filter a string through a command that transforms its input to its output"
197 for name, fn in filtertable.iteritems():
197 for name, fn in filtertable.iteritems():
198 if cmd.startswith(name):
198 if cmd.startswith(name):
199 return fn(s, cmd[len(name):].lstrip())
199 return fn(s, cmd[len(name):].lstrip())
200 return pipefilter(s, cmd)
200 return pipefilter(s, cmd)
201
201
202 def binary(s):
202 def binary(s):
203 """return true if a string is binary data"""
203 """return true if a string is binary data"""
204 return bool(s and '\0' in s)
204 return bool(s and '\0' in s)
205
205
206 def increasingchunks(source, min=1024, max=65536):
206 def increasingchunks(source, min=1024, max=65536):
207 '''return no less than min bytes per chunk while data remains,
207 '''return no less than min bytes per chunk while data remains,
208 doubling min after each chunk until it reaches max'''
208 doubling min after each chunk until it reaches max'''
209 def log2(x):
209 def log2(x):
210 if not x:
210 if not x:
211 return 0
211 return 0
212 i = 0
212 i = 0
213 while x:
213 while x:
214 x >>= 1
214 x >>= 1
215 i += 1
215 i += 1
216 return i - 1
216 return i - 1
217
217
218 buf = []
218 buf = []
219 blen = 0
219 blen = 0
220 for chunk in source:
220 for chunk in source:
221 buf.append(chunk)
221 buf.append(chunk)
222 blen += len(chunk)
222 blen += len(chunk)
223 if blen >= min:
223 if blen >= min:
224 if min < max:
224 if min < max:
225 min = min << 1
225 min = min << 1
226 nmin = 1 << log2(blen)
226 nmin = 1 << log2(blen)
227 if nmin > min:
227 if nmin > min:
228 min = nmin
228 min = nmin
229 if min > max:
229 if min > max:
230 min = max
230 min = max
231 yield ''.join(buf)
231 yield ''.join(buf)
232 blen = 0
232 blen = 0
233 buf = []
233 buf = []
234 if buf:
234 if buf:
235 yield ''.join(buf)
235 yield ''.join(buf)
236
236
237 Abort = error.Abort
237 Abort = error.Abort
238
238
239 def always(fn): return True
239 def always(fn): return True
240 def never(fn): return False
240 def never(fn): return False
241
241
242 def pathto(root, n1, n2):
242 def pathto(root, n1, n2):
243 '''return the relative path from one place to another.
243 '''return the relative path from one place to another.
244 root should use os.sep to separate directories
244 root should use os.sep to separate directories
245 n1 should use os.sep to separate directories
245 n1 should use os.sep to separate directories
246 n2 should use "/" to separate directories
246 n2 should use "/" to separate directories
247 returns an os.sep-separated path.
247 returns an os.sep-separated path.
248
248
249 If n1 is a relative path, it's assumed it's
249 If n1 is a relative path, it's assumed it's
250 relative to root.
250 relative to root.
251 n2 should always be relative to root.
251 n2 should always be relative to root.
252 '''
252 '''
253 if not n1: return localpath(n2)
253 if not n1: return localpath(n2)
254 if os.path.isabs(n1):
254 if os.path.isabs(n1):
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
255 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
256 return os.path.join(root, localpath(n2))
256 return os.path.join(root, localpath(n2))
257 n2 = '/'.join((pconvert(root), n2))
257 n2 = '/'.join((pconvert(root), n2))
258 a, b = splitpath(n1), n2.split('/')
258 a, b = splitpath(n1), n2.split('/')
259 a.reverse()
259 a.reverse()
260 b.reverse()
260 b.reverse()
261 while a and b and a[-1] == b[-1]:
261 while a and b and a[-1] == b[-1]:
262 a.pop()
262 a.pop()
263 b.pop()
263 b.pop()
264 b.reverse()
264 b.reverse()
265 return os.sep.join((['..'] * len(a)) + b) or '.'
265 return os.sep.join((['..'] * len(a)) + b) or '.'
266
266
267 def canonpath(root, cwd, myname):
267 def canonpath(root, cwd, myname):
268 """return the canonical path of myname, given cwd and root"""
268 """return the canonical path of myname, given cwd and root"""
269 if root == os.sep:
269 if root == os.sep:
270 rootsep = os.sep
270 rootsep = os.sep
271 elif endswithsep(root):
271 elif endswithsep(root):
272 rootsep = root
272 rootsep = root
273 else:
273 else:
274 rootsep = root + os.sep
274 rootsep = root + os.sep
275 name = myname
275 name = myname
276 if not os.path.isabs(name):
276 if not os.path.isabs(name):
277 name = os.path.join(root, cwd, name)
277 name = os.path.join(root, cwd, name)
278 name = os.path.normpath(name)
278 name = os.path.normpath(name)
279 audit_path = path_auditor(root)
279 audit_path = path_auditor(root)
280 if name != rootsep and name.startswith(rootsep):
280 if name != rootsep and name.startswith(rootsep):
281 name = name[len(rootsep):]
281 name = name[len(rootsep):]
282 audit_path(name)
282 audit_path(name)
283 return pconvert(name)
283 return pconvert(name)
284 elif name == root:
284 elif name == root:
285 return ''
285 return ''
286 else:
286 else:
287 # Determine whether `name' is in the hierarchy at or beneath `root',
287 # Determine whether `name' is in the hierarchy at or beneath `root',
288 # by iterating name=dirname(name) until that causes no change (can't
288 # by iterating name=dirname(name) until that causes no change (can't
289 # check name == '/', because that doesn't work on windows). For each
289 # check name == '/', because that doesn't work on windows). For each
290 # `name', compare dev/inode numbers. If they match, the list `rel'
290 # `name', compare dev/inode numbers. If they match, the list `rel'
291 # holds the reversed list of components making up the relative file
291 # holds the reversed list of components making up the relative file
292 # name we want.
292 # name we want.
293 root_st = os.stat(root)
293 root_st = os.stat(root)
294 rel = []
294 rel = []
295 while True:
295 while True:
296 try:
296 try:
297 name_st = os.stat(name)
297 name_st = os.stat(name)
298 except OSError:
298 except OSError:
299 break
299 break
300 if samestat(name_st, root_st):
300 if samestat(name_st, root_st):
301 if not rel:
301 if not rel:
302 # name was actually the same as root (maybe a symlink)
302 # name was actually the same as root (maybe a symlink)
303 return ''
303 return ''
304 rel.reverse()
304 rel.reverse()
305 name = os.path.join(*rel)
305 name = os.path.join(*rel)
306 audit_path(name)
306 audit_path(name)
307 return pconvert(name)
307 return pconvert(name)
308 dirname, basename = os.path.split(name)
308 dirname, basename = os.path.split(name)
309 rel.append(basename)
309 rel.append(basename)
310 if dirname == name:
310 if dirname == name:
311 break
311 break
312 name = dirname
312 name = dirname
313
313
314 raise Abort('%s not under root' % myname)
314 raise Abort('%s not under root' % myname)
315
315
316 _hgexecutable = None
316 _hgexecutable = None
317
317
318 def main_is_frozen():
318 def main_is_frozen():
319 """return True if we are a frozen executable.
319 """return True if we are a frozen executable.
320
320
321 The code supports py2exe (most common, Windows only) and tools/freeze
321 The code supports py2exe (most common, Windows only) and tools/freeze
322 (portable, not much used).
322 (portable, not much used).
323 """
323 """
324 return (hasattr(sys, "frozen") or # new py2exe
324 return (hasattr(sys, "frozen") or # new py2exe
325 hasattr(sys, "importers") or # old py2exe
325 hasattr(sys, "importers") or # old py2exe
326 imp.is_frozen("__main__")) # tools/freeze
326 imp.is_frozen("__main__")) # tools/freeze
327
327
328 def hgexecutable():
328 def hgexecutable():
329 """return location of the 'hg' executable.
329 """return location of the 'hg' executable.
330
330
331 Defaults to $HG or 'hg' in the search path.
331 Defaults to $HG or 'hg' in the search path.
332 """
332 """
333 if _hgexecutable is None:
333 if _hgexecutable is None:
334 hg = os.environ.get('HG')
334 hg = os.environ.get('HG')
335 if hg:
335 if hg:
336 set_hgexecutable(hg)
336 set_hgexecutable(hg)
337 elif main_is_frozen():
337 elif main_is_frozen():
338 set_hgexecutable(sys.executable)
338 set_hgexecutable(sys.executable)
339 else:
339 else:
340 set_hgexecutable(find_exe('hg') or 'hg')
340 set_hgexecutable(find_exe('hg') or 'hg')
341 return _hgexecutable
341 return _hgexecutable
342
342
343 def set_hgexecutable(path):
343 def set_hgexecutable(path):
344 """set location of the 'hg' executable"""
344 """set location of the 'hg' executable"""
345 global _hgexecutable
345 global _hgexecutable
346 _hgexecutable = path
346 _hgexecutable = path
347
347
348 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
348 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
349 '''enhanced shell command execution.
349 '''enhanced shell command execution.
350 run with environment maybe modified, maybe in different dir.
350 run with environment maybe modified, maybe in different dir.
351
351
352 if command fails and onerr is None, return status. if ui object,
352 if command fails and onerr is None, return status. if ui object,
353 print error message and return status, else raise onerr object as
353 print error message and return status, else raise onerr object as
354 exception.'''
354 exception.'''
355 def py2shell(val):
355 def py2shell(val):
356 'convert python object into string that is useful to shell'
356 'convert python object into string that is useful to shell'
357 if val is None or val is False:
357 if val is None or val is False:
358 return '0'
358 return '0'
359 if val is True:
359 if val is True:
360 return '1'
360 return '1'
361 return str(val)
361 return str(val)
362 oldenv = {}
362 oldenv = {}
363 for k in environ:
363 for k in environ:
364 oldenv[k] = os.environ.get(k)
364 oldenv[k] = os.environ.get(k)
365 if cwd is not None:
365 if cwd is not None:
366 oldcwd = os.getcwd()
366 oldcwd = os.getcwd()
367 origcmd = cmd
367 origcmd = cmd
368 if os.name == 'nt':
368 if os.name == 'nt':
369 cmd = '"%s"' % cmd
369 cmd = '"%s"' % cmd
370 try:
370 try:
371 for k, v in environ.iteritems():
371 for k, v in environ.iteritems():
372 os.environ[k] = py2shell(v)
372 os.environ[k] = py2shell(v)
373 os.environ['HG'] = hgexecutable()
373 os.environ['HG'] = hgexecutable()
374 if cwd is not None and oldcwd != cwd:
374 if cwd is not None and oldcwd != cwd:
375 os.chdir(cwd)
375 os.chdir(cwd)
376 rc = os.system(cmd)
376 rc = os.system(cmd)
377 if sys.platform == 'OpenVMS' and rc & 1:
377 if sys.platform == 'OpenVMS' and rc & 1:
378 rc = 0
378 rc = 0
379 if rc and onerr:
379 if rc and onerr:
380 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
380 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
381 explain_exit(rc)[0])
381 explain_exit(rc)[0])
382 if errprefix:
382 if errprefix:
383 errmsg = '%s: %s' % (errprefix, errmsg)
383 errmsg = '%s: %s' % (errprefix, errmsg)
384 try:
384 try:
385 onerr.warn(errmsg + '\n')
385 onerr.warn(errmsg + '\n')
386 except AttributeError:
386 except AttributeError:
387 raise onerr(errmsg)
387 raise onerr(errmsg)
388 return rc
388 return rc
389 finally:
389 finally:
390 for k, v in oldenv.iteritems():
390 for k, v in oldenv.iteritems():
391 if v is None:
391 if v is None:
392 del os.environ[k]
392 del os.environ[k]
393 else:
393 else:
394 os.environ[k] = v
394 os.environ[k] = v
395 if cwd is not None and oldcwd != cwd:
395 if cwd is not None and oldcwd != cwd:
396 os.chdir(oldcwd)
396 os.chdir(oldcwd)
397
397
398 def checksignature(func):
398 def checksignature(func):
399 '''wrap a function with code to check for calling errors'''
399 '''wrap a function with code to check for calling errors'''
400 def check(*args, **kwargs):
400 def check(*args, **kwargs):
401 try:
401 try:
402 return func(*args, **kwargs)
402 return func(*args, **kwargs)
403 except TypeError:
403 except TypeError:
404 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
404 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
405 raise error.SignatureError
405 raise error.SignatureError
406 raise
406 raise
407
407
408 return check
408 return check
409
409
410 # os.path.lexists is not available on python2.3
410 # os.path.lexists is not available on python2.3
411 def lexists(filename):
411 def lexists(filename):
412 "test whether a file with this name exists. does not follow symlinks"
412 "test whether a file with this name exists. does not follow symlinks"
413 try:
413 try:
414 os.lstat(filename)
414 os.lstat(filename)
415 except:
415 except:
416 return False
416 return False
417 return True
417 return True
418
418
419 def rename(src, dst):
419 def rename(src, dst):
420 """forcibly rename a file"""
420 """forcibly rename a file"""
421 try:
421 try:
422 os.rename(src, dst)
422 os.rename(src, dst)
423 except OSError, err: # FIXME: check err (EEXIST ?)
423 except OSError, err: # FIXME: check err (EEXIST ?)
424
424
425 # On windows, rename to existing file is not allowed, so we
425 # On windows, rename to existing file is not allowed, so we
426 # must delete destination first. But if a file is open, unlink
426 # must delete destination first. But if a file is open, unlink
427 # schedules it for delete but does not delete it. Rename
427 # schedules it for delete but does not delete it. Rename
428 # happens immediately even for open files, so we rename
428 # happens immediately even for open files, so we rename
429 # destination to a temporary name, then delete that. Then
429 # destination to a temporary name, then delete that. Then
430 # rename is safe to do.
430 # rename is safe to do.
431 # The temporary name is chosen at random to avoid the situation
431 # The temporary name is chosen at random to avoid the situation
432 # where a file is left lying around from a previous aborted run.
432 # where a file is left lying around from a previous aborted run.
433 # The usual race condition this introduces can't be avoided as
433 # The usual race condition this introduces can't be avoided as
434 # we need the name to rename into, and not the file itself. Due
434 # we need the name to rename into, and not the file itself. Due
435 # to the nature of the operation however, any races will at worst
435 # to the nature of the operation however, any races will at worst
436 # lead to the rename failing and the current operation aborting.
436 # lead to the rename failing and the current operation aborting.
437
437
438 def tempname(prefix):
438 def tempname(prefix):
439 for tries in xrange(10):
439 for tries in xrange(10):
440 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
440 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
441 if not os.path.exists(temp):
441 if not os.path.exists(temp):
442 return temp
442 return temp
443 raise IOError, (errno.EEXIST, "No usable temporary filename found")
443 raise IOError, (errno.EEXIST, "No usable temporary filename found")
444
444
445 temp = tempname(dst)
445 temp = tempname(dst)
446 os.rename(dst, temp)
446 os.rename(dst, temp)
447 os.unlink(temp)
447 os.unlink(temp)
448 os.rename(src, dst)
448 os.rename(src, dst)
449
449
450 def unlink(f):
450 def unlink(f):
451 """unlink and remove the directory if it is empty"""
451 """unlink and remove the directory if it is empty"""
452 os.unlink(f)
452 os.unlink(f)
453 # try removing directories that might now be empty
453 # try removing directories that might now be empty
454 try:
454 try:
455 os.removedirs(os.path.dirname(f))
455 os.removedirs(os.path.dirname(f))
456 except OSError:
456 except OSError:
457 pass
457 pass
458
458
459 def copyfile(src, dest):
459 def copyfile(src, dest):
460 "copy a file, preserving mode and atime/mtime"
460 "copy a file, preserving mode and atime/mtime"
461 if os.path.islink(src):
461 if os.path.islink(src):
462 try:
462 try:
463 os.unlink(dest)
463 os.unlink(dest)
464 except:
464 except:
465 pass
465 pass
466 os.symlink(os.readlink(src), dest)
466 os.symlink(os.readlink(src), dest)
467 else:
467 else:
468 try:
468 try:
469 shutil.copyfile(src, dest)
469 shutil.copyfile(src, dest)
470 shutil.copystat(src, dest)
470 shutil.copystat(src, dest)
471 except shutil.Error, inst:
471 except shutil.Error, inst:
472 raise Abort(str(inst))
472 raise Abort(str(inst))
473
473
474 def copyfiles(src, dst, hardlink=None):
474 def copyfiles(src, dst, hardlink=None):
475 """Copy a directory tree using hardlinks if possible"""
475 """Copy a directory tree using hardlinks if possible"""
476
476
477 if hardlink is None:
477 if hardlink is None:
478 hardlink = (os.stat(src).st_dev ==
478 hardlink = (os.stat(src).st_dev ==
479 os.stat(os.path.dirname(dst)).st_dev)
479 os.stat(os.path.dirname(dst)).st_dev)
480
480
481 if os.path.isdir(src):
481 if os.path.isdir(src):
482 os.mkdir(dst)
482 os.mkdir(dst)
483 for name, kind in osutil.listdir(src):
483 for name, kind in osutil.listdir(src):
484 srcname = os.path.join(src, name)
484 srcname = os.path.join(src, name)
485 dstname = os.path.join(dst, name)
485 dstname = os.path.join(dst, name)
486 copyfiles(srcname, dstname, hardlink)
486 copyfiles(srcname, dstname, hardlink)
487 else:
487 else:
488 if hardlink:
488 if hardlink:
489 try:
489 try:
490 os_link(src, dst)
490 os_link(src, dst)
491 except (IOError, OSError):
491 except (IOError, OSError):
492 hardlink = False
492 hardlink = False
493 shutil.copy(src, dst)
493 shutil.copy(src, dst)
494 else:
494 else:
495 shutil.copy(src, dst)
495 shutil.copy(src, dst)
496
496
497 class path_auditor(object):
497 class path_auditor(object):
498 '''ensure that a filesystem path contains no banned components.
498 '''ensure that a filesystem path contains no banned components.
499 the following properties of a path are checked:
499 the following properties of a path are checked:
500
500
501 - under top-level .hg
501 - under top-level .hg
502 - starts at the root of a windows drive
502 - starts at the root of a windows drive
503 - contains ".."
503 - contains ".."
504 - traverses a symlink (e.g. a/symlink_here/b)
504 - traverses a symlink (e.g. a/symlink_here/b)
505 - inside a nested repository'''
505 - inside a nested repository'''
506
506
507 def __init__(self, root):
507 def __init__(self, root):
508 self.audited = set()
508 self.audited = set()
509 self.auditeddir = set()
509 self.auditeddir = set()
510 self.root = root
510 self.root = root
511
511
512 def __call__(self, path):
512 def __call__(self, path):
513 if path in self.audited:
513 if path in self.audited:
514 return
514 return
515 normpath = os.path.normcase(path)
515 normpath = os.path.normcase(path)
516 parts = splitpath(normpath)
516 parts = splitpath(normpath)
517 if (os.path.splitdrive(path)[0]
517 if (os.path.splitdrive(path)[0]
518 or parts[0].lower() in ('.hg', '.hg.', '')
518 or parts[0].lower() in ('.hg', '.hg.', '')
519 or os.pardir in parts):
519 or os.pardir in parts):
520 raise Abort(_("path contains illegal component: %s") % path)
520 raise Abort(_("path contains illegal component: %s") % path)
521 if '.hg' in path.lower():
521 if '.hg' in path.lower():
522 lparts = [p.lower() for p in parts]
522 lparts = [p.lower() for p in parts]
523 for p in '.hg', '.hg.':
523 for p in '.hg', '.hg.':
524 if p in lparts[1:]:
524 if p in lparts[1:]:
525 pos = lparts.index(p)
525 pos = lparts.index(p)
526 base = os.path.join(*parts[:pos])
526 base = os.path.join(*parts[:pos])
527 raise Abort(_('path %r is inside repo %r') % (path, base))
527 raise Abort(_('path %r is inside repo %r') % (path, base))
528 def check(prefix):
528 def check(prefix):
529 curpath = os.path.join(self.root, prefix)
529 curpath = os.path.join(self.root, prefix)
530 try:
530 try:
531 st = os.lstat(curpath)
531 st = os.lstat(curpath)
532 except OSError, err:
532 except OSError, err:
533 # EINVAL can be raised as invalid path syntax under win32.
533 # EINVAL can be raised as invalid path syntax under win32.
534 # They must be ignored for patterns can be checked too.
534 # They must be ignored for patterns can be checked too.
535 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
535 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
536 raise
536 raise
537 else:
537 else:
538 if stat.S_ISLNK(st.st_mode):
538 if stat.S_ISLNK(st.st_mode):
539 raise Abort(_('path %r traverses symbolic link %r') %
539 raise Abort(_('path %r traverses symbolic link %r') %
540 (path, prefix))
540 (path, prefix))
541 elif (stat.S_ISDIR(st.st_mode) and
541 elif (stat.S_ISDIR(st.st_mode) and
542 os.path.isdir(os.path.join(curpath, '.hg'))):
542 os.path.isdir(os.path.join(curpath, '.hg'))):
543 raise Abort(_('path %r is inside repo %r') %
543 raise Abort(_('path %r is inside repo %r') %
544 (path, prefix))
544 (path, prefix))
545 parts.pop()
545 parts.pop()
546 prefixes = []
546 prefixes = []
547 while parts:
547 while parts:
548 prefix = os.sep.join(parts)
548 prefix = os.sep.join(parts)
549 if prefix in self.auditeddir:
549 if prefix in self.auditeddir:
550 break
550 break
551 check(prefix)
551 check(prefix)
552 prefixes.append(prefix)
552 prefixes.append(prefix)
553 parts.pop()
553 parts.pop()
554
554
555 self.audited.add(path)
555 self.audited.add(path)
556 # only add prefixes to the cache after checking everything: we don't
556 # only add prefixes to the cache after checking everything: we don't
557 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
557 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
558 self.auditeddir.update(prefixes)
558 self.auditeddir.update(prefixes)
559
559
560 def nlinks(pathname):
560 def nlinks(pathname):
561 """Return number of hardlinks for the given file."""
561 """Return number of hardlinks for the given file."""
562 return os.lstat(pathname).st_nlink
562 return os.lstat(pathname).st_nlink
563
563
564 if hasattr(os, 'link'):
564 if hasattr(os, 'link'):
565 os_link = os.link
565 os_link = os.link
566 else:
566 else:
567 def os_link(src, dst):
567 def os_link(src, dst):
568 raise OSError(0, _("Hardlinks not supported"))
568 raise OSError(0, _("Hardlinks not supported"))
569
569
570 def lookup_reg(key, name=None, scope=None):
570 def lookup_reg(key, name=None, scope=None):
571 return None
571 return None
572
572
573 if os.name == 'nt':
573 if os.name == 'nt':
574 from windows import *
574 from windows import *
575 else:
575 else:
576 from posix import *
576 from posix import *
577
577
578 def makelock(info, pathname):
578 def makelock(info, pathname):
579 try:
579 try:
580 return os.symlink(info, pathname)
580 return os.symlink(info, pathname)
581 except OSError, why:
581 except OSError, why:
582 if why.errno == errno.EEXIST:
582 if why.errno == errno.EEXIST:
583 raise
583 raise
584 except AttributeError: # no symlink in os
584 except AttributeError: # no symlink in os
585 pass
585 pass
586
586
587 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
587 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
588 os.write(ld, info)
588 os.write(ld, info)
589 os.close(ld)
589 os.close(ld)
590
590
591 def readlock(pathname):
591 def readlock(pathname):
592 try:
592 try:
593 return os.readlink(pathname)
593 return os.readlink(pathname)
594 except OSError, why:
594 except OSError, why:
595 if why.errno not in (errno.EINVAL, errno.ENOSYS):
595 if why.errno not in (errno.EINVAL, errno.ENOSYS):
596 raise
596 raise
597 except AttributeError: # no symlink in os
597 except AttributeError: # no symlink in os
598 pass
598 pass
599 return posixfile(pathname).read()
599 return posixfile(pathname).read()
600
600
601 def fstat(fp):
601 def fstat(fp):
602 '''stat file object that may not have fileno method.'''
602 '''stat file object that may not have fileno method.'''
603 try:
603 try:
604 return os.fstat(fp.fileno())
604 return os.fstat(fp.fileno())
605 except AttributeError:
605 except AttributeError:
606 return os.stat(fp.name)
606 return os.stat(fp.name)
607
607
608 # File system features
608 # File system features
609
609
610 def checkcase(path):
610 def checkcase(path):
611 """
611 """
612 Check whether the given path is on a case-sensitive filesystem
612 Check whether the given path is on a case-sensitive filesystem
613
613
614 Requires a path (like /foo/.hg) ending with a foldable final
614 Requires a path (like /foo/.hg) ending with a foldable final
615 directory component.
615 directory component.
616 """
616 """
617 s1 = os.stat(path)
617 s1 = os.stat(path)
618 d, b = os.path.split(path)
618 d, b = os.path.split(path)
619 p2 = os.path.join(d, b.upper())
619 p2 = os.path.join(d, b.upper())
620 if path == p2:
620 if path == p2:
621 p2 = os.path.join(d, b.lower())
621 p2 = os.path.join(d, b.lower())
622 try:
622 try:
623 s2 = os.stat(p2)
623 s2 = os.stat(p2)
624 if s2 == s1:
624 if s2 == s1:
625 return False
625 return False
626 return True
626 return True
627 except:
627 except:
628 return True
628 return True
629
629
630 _fspathcache = {}
630 _fspathcache = {}
631 def fspath(name, root):
631 def fspath(name, root):
632 '''Get name in the case stored in the filesystem
632 '''Get name in the case stored in the filesystem
633
633
634 The name is either relative to root, or it is an absolute path starting
634 The name is either relative to root, or it is an absolute path starting
635 with root. Note that this function is unnecessary, and should not be
635 with root. Note that this function is unnecessary, and should not be
636 called, for case-sensitive filesystems (simply because it's expensive).
636 called, for case-sensitive filesystems (simply because it's expensive).
637 '''
637 '''
638 # If name is absolute, make it relative
638 # If name is absolute, make it relative
639 if name.lower().startswith(root.lower()):
639 if name.lower().startswith(root.lower()):
640 l = len(root)
640 l = len(root)
641 if name[l] == os.sep or name[l] == os.altsep:
641 if name[l] == os.sep or name[l] == os.altsep:
642 l = l + 1
642 l = l + 1
643 name = name[l:]
643 name = name[l:]
644
644
645 if not os.path.exists(os.path.join(root, name)):
645 if not os.path.exists(os.path.join(root, name)):
646 return None
646 return None
647
647
648 seps = os.sep
648 seps = os.sep
649 if os.altsep:
649 if os.altsep:
650 seps = seps + os.altsep
650 seps = seps + os.altsep
651 # Protect backslashes. This gets silly very quickly.
651 # Protect backslashes. This gets silly very quickly.
652 seps.replace('\\','\\\\')
652 seps.replace('\\','\\\\')
653 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
653 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
654 dir = os.path.normcase(os.path.normpath(root))
654 dir = os.path.normcase(os.path.normpath(root))
655 result = []
655 result = []
656 for part, sep in pattern.findall(name):
656 for part, sep in pattern.findall(name):
657 if sep:
657 if sep:
658 result.append(sep)
658 result.append(sep)
659 continue
659 continue
660
660
661 if dir not in _fspathcache:
661 if dir not in _fspathcache:
662 _fspathcache[dir] = os.listdir(dir)
662 _fspathcache[dir] = os.listdir(dir)
663 contents = _fspathcache[dir]
663 contents = _fspathcache[dir]
664
664
665 lpart = part.lower()
665 lpart = part.lower()
666 for n in contents:
666 for n in contents:
667 if n.lower() == lpart:
667 if n.lower() == lpart:
668 result.append(n)
668 result.append(n)
669 break
669 break
670 else:
670 else:
671 # Cannot happen, as the file exists!
671 # Cannot happen, as the file exists!
672 result.append(part)
672 result.append(part)
673 dir = os.path.join(dir, lpart)
673 dir = os.path.join(dir, lpart)
674
674
675 return ''.join(result)
675 return ''.join(result)
676
676
677 def checkexec(path):
677 def checkexec(path):
678 """
678 """
679 Check whether the given path is on a filesystem with UNIX-like exec flags
679 Check whether the given path is on a filesystem with UNIX-like exec flags
680
680
681 Requires a directory (like /foo/.hg)
681 Requires a directory (like /foo/.hg)
682 """
682 """
683
683
684 # VFAT on some Linux versions can flip mode but it doesn't persist
684 # VFAT on some Linux versions can flip mode but it doesn't persist
685 # a FS remount. Frequently we can detect it if files are created
685 # a FS remount. Frequently we can detect it if files are created
686 # with exec bit on.
686 # with exec bit on.
687
687
688 try:
688 try:
689 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
689 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
690 fh, fn = tempfile.mkstemp("", "", path)
690 fh, fn = tempfile.mkstemp("", "", path)
691 try:
691 try:
692 os.close(fh)
692 os.close(fh)
693 m = os.stat(fn).st_mode & 0777
693 m = os.stat(fn).st_mode & 0777
694 new_file_has_exec = m & EXECFLAGS
694 new_file_has_exec = m & EXECFLAGS
695 os.chmod(fn, m ^ EXECFLAGS)
695 os.chmod(fn, m ^ EXECFLAGS)
696 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
696 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
697 finally:
697 finally:
698 os.unlink(fn)
698 os.unlink(fn)
699 except (IOError, OSError):
699 except (IOError, OSError):
700 # we don't care, the user probably won't be able to commit anyway
700 # we don't care, the user probably won't be able to commit anyway
701 return False
701 return False
702 return not (new_file_has_exec or exec_flags_cannot_flip)
702 return not (new_file_has_exec or exec_flags_cannot_flip)
703
703
704 def checklink(path):
704 def checklink(path):
705 """check whether the given path is on a symlink-capable filesystem"""
705 """check whether the given path is on a symlink-capable filesystem"""
706 # mktemp is not racy because symlink creation will fail if the
706 # mktemp is not racy because symlink creation will fail if the
707 # file already exists
707 # file already exists
708 name = tempfile.mktemp(dir=path)
708 name = tempfile.mktemp(dir=path)
709 try:
709 try:
710 os.symlink(".", name)
710 os.symlink(".", name)
711 os.unlink(name)
711 os.unlink(name)
712 return True
712 return True
713 except (OSError, AttributeError):
713 except (OSError, AttributeError):
714 return False
714 return False
715
715
716 def needbinarypatch():
716 def needbinarypatch():
717 """return True if patches should be applied in binary mode by default."""
717 """return True if patches should be applied in binary mode by default."""
718 return os.name == 'nt'
718 return os.name == 'nt'
719
719
720 def endswithsep(path):
720 def endswithsep(path):
721 '''Check path ends with os.sep or os.altsep.'''
721 '''Check path ends with os.sep or os.altsep.'''
722 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
722 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
723
723
724 def splitpath(path):
724 def splitpath(path):
725 '''Split path by os.sep.
725 '''Split path by os.sep.
726 Note that this function does not use os.altsep because this is
726 Note that this function does not use os.altsep because this is
727 an alternative of simple "xxx.split(os.sep)".
727 an alternative of simple "xxx.split(os.sep)".
728 It is recommended to use os.path.normpath() before using this
728 It is recommended to use os.path.normpath() before using this
729 function if need.'''
729 function if need.'''
730 return path.split(os.sep)
730 return path.split(os.sep)
731
731
732 def gui():
732 def gui():
733 '''Are we running in a GUI?'''
733 '''Are we running in a GUI?'''
734 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
734 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
735
735
736 def mktempcopy(name, emptyok=False, createmode=None):
736 def mktempcopy(name, emptyok=False, createmode=None):
737 """Create a temporary file with the same contents from name
737 """Create a temporary file with the same contents from name
738
738
739 The permission bits are copied from the original file.
739 The permission bits are copied from the original file.
740
740
741 If the temporary file is going to be truncated immediately, you
741 If the temporary file is going to be truncated immediately, you
742 can use emptyok=True as an optimization.
742 can use emptyok=True as an optimization.
743
743
744 Returns the name of the temporary file.
744 Returns the name of the temporary file.
745 """
745 """
746 d, fn = os.path.split(name)
746 d, fn = os.path.split(name)
747 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
747 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
748 os.close(fd)
748 os.close(fd)
749 # Temporary files are created with mode 0600, which is usually not
749 # Temporary files are created with mode 0600, which is usually not
750 # what we want. If the original file already exists, just copy
750 # what we want. If the original file already exists, just copy
751 # its mode. Otherwise, manually obey umask.
751 # its mode. Otherwise, manually obey umask.
752 try:
752 try:
753 st_mode = os.lstat(name).st_mode & 0777
753 st_mode = os.lstat(name).st_mode & 0777
754 except OSError, inst:
754 except OSError, inst:
755 if inst.errno != errno.ENOENT:
755 if inst.errno != errno.ENOENT:
756 raise
756 raise
757 st_mode = createmode
757 st_mode = createmode
758 if st_mode is None:
758 if st_mode is None:
759 st_mode = ~umask
759 st_mode = ~umask
760 st_mode &= 0666
760 st_mode &= 0666
761 os.chmod(temp, st_mode)
761 os.chmod(temp, st_mode)
762 if emptyok:
762 if emptyok:
763 return temp
763 return temp
764 try:
764 try:
765 try:
765 try:
766 ifp = posixfile(name, "rb")
766 ifp = posixfile(name, "rb")
767 except IOError, inst:
767 except IOError, inst:
768 if inst.errno == errno.ENOENT:
768 if inst.errno == errno.ENOENT:
769 return temp
769 return temp
770 if not getattr(inst, 'filename', None):
770 if not getattr(inst, 'filename', None):
771 inst.filename = name
771 inst.filename = name
772 raise
772 raise
773 ofp = posixfile(temp, "wb")
773 ofp = posixfile(temp, "wb")
774 for chunk in filechunkiter(ifp):
774 for chunk in filechunkiter(ifp):
775 ofp.write(chunk)
775 ofp.write(chunk)
776 ifp.close()
776 ifp.close()
777 ofp.close()
777 ofp.close()
778 except:
778 except:
779 try: os.unlink(temp)
779 try: os.unlink(temp)
780 except: pass
780 except: pass
781 raise
781 raise
782 return temp
782 return temp
783
783
784 class atomictempfile(object):
784 class atomictempfile(object):
785 """file-like object that atomically updates a file
785 """file-like object that atomically updates a file
786
786
787 All writes will be redirected to a temporary copy of the original
787 All writes will be redirected to a temporary copy of the original
788 file. When rename is called, the copy is renamed to the original
788 file. When rename is called, the copy is renamed to the original
789 name, making the changes visible.
789 name, making the changes visible.
790 """
790 """
791 def __init__(self, name, mode, createmode):
791 def __init__(self, name, mode, createmode):
792 self.__name = name
792 self.__name = name
793 self._fp = None
793 self._fp = None
794 self.temp = mktempcopy(name, emptyok=('w' in mode),
794 self.temp = mktempcopy(name, emptyok=('w' in mode),
795 createmode=createmode)
795 createmode=createmode)
796 self._fp = posixfile(self.temp, mode)
796 self._fp = posixfile(self.temp, mode)
797
797
798 def __getattr__(self, name):
798 def __getattr__(self, name):
799 return getattr(self._fp, name)
799 return getattr(self._fp, name)
800
800
801 def rename(self):
801 def rename(self):
802 if not self._fp.closed:
802 if not self._fp.closed:
803 self._fp.close()
803 self._fp.close()
804 rename(self.temp, localpath(self.__name))
804 rename(self.temp, localpath(self.__name))
805
805
806 def __del__(self):
806 def __del__(self):
807 if not self._fp:
807 if not self._fp:
808 return
808 return
809 if not self._fp.closed:
809 if not self._fp.closed:
810 try:
810 try:
811 os.unlink(self.temp)
811 os.unlink(self.temp)
812 except: pass
812 except: pass
813 self._fp.close()
813 self._fp.close()
814
814
815 def makedirs(name, mode=None):
815 def makedirs(name, mode=None):
816 """recursive directory creation with parent mode inheritance"""
816 """recursive directory creation with parent mode inheritance"""
817 try:
817 try:
818 os.mkdir(name)
818 os.mkdir(name)
819 if mode is not None:
819 if mode is not None:
820 os.chmod(name, mode)
820 os.chmod(name, mode)
821 return
821 return
822 except OSError, err:
822 except OSError, err:
823 if err.errno == errno.EEXIST:
823 if err.errno == errno.EEXIST:
824 return
824 return
825 if err.errno != errno.ENOENT:
825 if err.errno != errno.ENOENT:
826 raise
826 raise
827 parent = os.path.abspath(os.path.dirname(name))
827 parent = os.path.abspath(os.path.dirname(name))
828 makedirs(parent, mode)
828 makedirs(parent, mode)
829 makedirs(name, mode)
829 makedirs(name, mode)
830
830
831 class opener(object):
831 class opener(object):
832 """Open files relative to a base directory
832 """Open files relative to a base directory
833
833
834 This class is used to hide the details of COW semantics and
834 This class is used to hide the details of COW semantics and
835 remote file access from higher level code.
835 remote file access from higher level code.
836 """
836 """
837 def __init__(self, base, audit=True):
837 def __init__(self, base, audit=True):
838 self.base = base
838 self.base = base
839 if audit:
839 if audit:
840 self.audit_path = path_auditor(base)
840 self.audit_path = path_auditor(base)
841 else:
841 else:
842 self.audit_path = always
842 self.audit_path = always
843 self.createmode = None
843 self.createmode = None
844
844
845 def __getattr__(self, name):
845 def __getattr__(self, name):
846 if name == '_can_symlink':
846 if name == '_can_symlink':
847 self._can_symlink = checklink(self.base)
847 self._can_symlink = checklink(self.base)
848 return self._can_symlink
848 return self._can_symlink
849 raise AttributeError(name)
849 raise AttributeError(name)
850
850
851 def _fixfilemode(self, name):
851 def _fixfilemode(self, name):
852 if self.createmode is None:
852 if self.createmode is None:
853 return
853 return
854 os.chmod(name, self.createmode & 0666)
854 os.chmod(name, self.createmode & 0666)
855
855
856 def __call__(self, path, mode="r", text=False, atomictemp=False):
856 def __call__(self, path, mode="r", text=False, atomictemp=False):
857 self.audit_path(path)
857 self.audit_path(path)
858 f = os.path.join(self.base, path)
858 f = os.path.join(self.base, path)
859
859
860 if not text and "b" not in mode:
860 if not text and "b" not in mode:
861 mode += "b" # for that other OS
861 mode += "b" # for that other OS
862
862
863 nlink = -1
863 nlink = -1
864 if mode not in ("r", "rb"):
864 if mode not in ("r", "rb"):
865 try:
865 try:
866 nlink = nlinks(f)
866 nlink = nlinks(f)
867 except OSError:
867 except OSError:
868 nlink = 0
868 nlink = 0
869 d = os.path.dirname(f)
869 d = os.path.dirname(f)
870 if not os.path.isdir(d):
870 if not os.path.isdir(d):
871 makedirs(d, self.createmode)
871 makedirs(d, self.createmode)
872 if atomictemp:
872 if atomictemp:
873 return atomictempfile(f, mode, self.createmode)
873 return atomictempfile(f, mode, self.createmode)
874 if nlink > 1:
874 if nlink > 1:
875 rename(mktempcopy(f), f)
875 rename(mktempcopy(f), f)
876 fp = posixfile(f, mode)
876 fp = posixfile(f, mode)
877 if nlink == 0:
877 if nlink == 0:
878 self._fixfilemode(f)
878 self._fixfilemode(f)
879 return fp
879 return fp
880
880
881 def symlink(self, src, dst):
881 def symlink(self, src, dst):
882 self.audit_path(dst)
882 self.audit_path(dst)
883 linkname = os.path.join(self.base, dst)
883 linkname = os.path.join(self.base, dst)
884 try:
884 try:
885 os.unlink(linkname)
885 os.unlink(linkname)
886 except OSError:
886 except OSError:
887 pass
887 pass
888
888
889 dirname = os.path.dirname(linkname)
889 dirname = os.path.dirname(linkname)
890 if not os.path.exists(dirname):
890 if not os.path.exists(dirname):
891 makedirs(dirname, self.createmode)
891 makedirs(dirname, self.createmode)
892
892
893 if self._can_symlink:
893 if self._can_symlink:
894 try:
894 try:
895 os.symlink(src, linkname)
895 os.symlink(src, linkname)
896 except OSError, err:
896 except OSError, err:
897 raise OSError(err.errno, _('could not symlink to %r: %s') %
897 raise OSError(err.errno, _('could not symlink to %r: %s') %
898 (src, err.strerror), linkname)
898 (src, err.strerror), linkname)
899 else:
899 else:
900 f = self(dst, "w")
900 f = self(dst, "w")
901 f.write(src)
901 f.write(src)
902 f.close()
902 f.close()
903 self._fixfilemode(dst)
903 self._fixfilemode(dst)
904
904
905 class chunkbuffer(object):
905 class chunkbuffer(object):
906 """Allow arbitrary sized chunks of data to be efficiently read from an
906 """Allow arbitrary sized chunks of data to be efficiently read from an
907 iterator over chunks of arbitrary size."""
907 iterator over chunks of arbitrary size."""
908
908
909 def __init__(self, in_iter):
909 def __init__(self, in_iter):
910 """in_iter is the iterator that's iterating over the input chunks.
910 """in_iter is the iterator that's iterating over the input chunks.
911 targetsize is how big a buffer to try to maintain."""
911 targetsize is how big a buffer to try to maintain."""
912 self.iter = iter(in_iter)
912 self.iter = iter(in_iter)
913 self.buf = ''
913 self.buf = ''
914 self.targetsize = 2**16
914 self.targetsize = 2**16
915
915
916 def read(self, l):
916 def read(self, l):
917 """Read L bytes of data from the iterator of chunks of data.
917 """Read L bytes of data from the iterator of chunks of data.
918 Returns less than L bytes if the iterator runs dry."""
918 Returns less than L bytes if the iterator runs dry."""
919 if l > len(self.buf) and self.iter:
919 if l > len(self.buf) and self.iter:
920 # Clamp to a multiple of self.targetsize
920 # Clamp to a multiple of self.targetsize
921 targetsize = max(l, self.targetsize)
921 targetsize = max(l, self.targetsize)
922 collector = cStringIO.StringIO()
922 collector = cStringIO.StringIO()
923 collector.write(self.buf)
923 collector.write(self.buf)
924 collected = len(self.buf)
924 collected = len(self.buf)
925 for chunk in self.iter:
925 for chunk in self.iter:
926 collector.write(chunk)
926 collector.write(chunk)
927 collected += len(chunk)
927 collected += len(chunk)
928 if collected >= targetsize:
928 if collected >= targetsize:
929 break
929 break
930 if collected < targetsize:
930 if collected < targetsize:
931 self.iter = False
931 self.iter = False
932 self.buf = collector.getvalue()
932 self.buf = collector.getvalue()
933 if len(self.buf) == l:
933 if len(self.buf) == l:
934 s, self.buf = str(self.buf), ''
934 s, self.buf = str(self.buf), ''
935 else:
935 else:
936 s, self.buf = self.buf[:l], buffer(self.buf, l)
936 s, self.buf = self.buf[:l], buffer(self.buf, l)
937 return s
937 return s
938
938
939 def filechunkiter(f, size=65536, limit=None):
939 def filechunkiter(f, size=65536, limit=None):
940 """Create a generator that produces the data in the file size
940 """Create a generator that produces the data in the file size
941 (default 65536) bytes at a time, up to optional limit (default is
941 (default 65536) bytes at a time, up to optional limit (default is
942 to read all data). Chunks may be less than size bytes if the
942 to read all data). Chunks may be less than size bytes if the
943 chunk is the last chunk in the file, or the file is a socket or
943 chunk is the last chunk in the file, or the file is a socket or
944 some other type of file that sometimes reads less data than is
944 some other type of file that sometimes reads less data than is
945 requested."""
945 requested."""
946 assert size >= 0
946 assert size >= 0
947 assert limit is None or limit >= 0
947 assert limit is None or limit >= 0
948 while True:
948 while True:
949 if limit is None: nbytes = size
949 if limit is None: nbytes = size
950 else: nbytes = min(limit, size)
950 else: nbytes = min(limit, size)
951 s = nbytes and f.read(nbytes)
951 s = nbytes and f.read(nbytes)
952 if not s: break
952 if not s: break
953 if limit: limit -= len(s)
953 if limit: limit -= len(s)
954 yield s
954 yield s
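A small usage sketch for filechunkiter; the file name and sizes are hypothetical:

    # read at most 64 KiB from a file, 8 KiB per iteration
    fp = open('bigfile.bin', 'rb')
    total = 0
    for chunk in filechunkiter(fp, size=8192, limit=65536):
        total += len(chunk)
    fp.close()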
955
955
956 def makedate():
956 def makedate():
957 lt = time.localtime()
957 lt = time.localtime()
958 if lt[8] == 1 and time.daylight:
958 if lt[8] == 1 and time.daylight:
959 tz = time.altzone
959 tz = time.altzone
960 else:
960 else:
961 tz = time.timezone
961 tz = time.timezone
962 return time.mktime(lt), tz
962 return time.mktime(lt), tz
963
963
964 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
964 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
965 """represent a (unixtime, offset) tuple as a localized time.
965 """represent a (unixtime, offset) tuple as a localized time.
966 unixtime is seconds since the epoch, and offset is the time zone's
966 unixtime is seconds since the epoch, and offset is the time zone's
967 number of seconds away from UTC. if timezone is false, do not
967 number of seconds away from UTC. if timezone is false, do not
968 append time zone to string."""
968 append time zone to string."""
969 t, tz = date or makedate()
969 t, tz = date or makedate()
970 if "%1" in format or "%2" in format:
970 if "%1" in format or "%2" in format:
971 sign = (tz > 0) and "-" or "+"
971 sign = (tz > 0) and "-" or "+"
972 minutes = abs(tz) / 60
972 minutes = abs(tz) / 60
973 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
973 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
974 format = format.replace("%2", "%02d" % (minutes % 60))
974 format = format.replace("%2", "%02d" % (minutes % 60))
975 s = time.strftime(format, time.gmtime(float(t) - tz))
975 s = time.strftime(format, time.gmtime(float(t) - tz))
976 return s
976 return s
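As a concrete illustration of the %1/%2 substitution in datestr (the tuple value is made up): (0, 28800) is the epoch with an offset of 28800 seconds east of local time, i.e. UTC-8, and with the default format it renders as:

    # sign is "-" because tz > 0; 28800 seconds = 8 hours -> "-0800"
    datestr((0, 28800))      # -> 'Wed Dec 31 16:00:00 1969 -0800'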
977
977
978 def shortdate(date=None):
978 def shortdate(date=None):
979 """turn (timestamp, tzoff) tuple into iso 8631 date."""
979 """turn (timestamp, tzoff) tuple into iso 8631 date."""
980 return datestr(date, format='%Y-%m-%d')
980 return datestr(date, format='%Y-%m-%d')
981
981
def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset is not None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset

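# Editor's illustration (not part of the original module): both the raw
# "unixtime offset" form and a human-readable string are accepted; the second
# call relies on defaultdateformats, defined earlier in this module, so its
# exact behaviour is an assumption of this sketch.
def _example_parsedate():
    assert parsedate('1250593200 -7200') == (1250593200, -7200)
    # a readable date with an explicit zone; the offset comes back in seconds
    when, offset = parsedate('2009-08-18 13:00 +0200')
    assert offset == -7200
    return when, offset
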
def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    '-{days}' within a given number of days of today

    '{date} to {date}' a date range, inclusive

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop

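# Editor's illustration (not part of the original module): matchdate() returns
# a predicate over unixtime values, so log-style filtering reduces to a call.
def _example_matchdate():
    after_2009 = matchdate('>2009-01-01')
    # the current time is certainly on or after 2009-01-01
    return after_2009(makedate()[0])
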
def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

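# Editor's illustration (not part of the original module): the successive
# find()/slice steps above reduce a full author line to a login-like name.
def _example_shortuser():
    assert shortuser('John Doe <john.doe@example.com>') == 'john'
    return True
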
def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

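# Editor's illustration (not part of the original module): email() keeps only
# the address between angle brackets, or the whole string when none are found.
def _example_email():
    assert email('John Doe <john.doe@example.com>') == 'john.doe@example.com'
    assert email('john.doe@example.com') == 'john.doe@example.com'
    return True
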
def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])

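# Editor's illustration (not part of the original module): the three trailing
# dots count against maxlength, so only maxlength - 3 original characters stay.
def _example_ellipsis():
    assert ellipsis('abcdefghij', 8) == 'abcde...'
    assert ellipsis('short', 8) == 'short'
    return True
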
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs

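# Editor's illustration (not part of the original module): collecting every
# repository below a hypothetical directory, following symlinks.
def _example_walkrepos(base='/srv/repositories'):
    return [root for root in walkrepos(base, followsym=True, recurse=True)]
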
_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes

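# Editor's illustration (not part of the original module): the (multiplier,
# divisor) table above picks the first unit that keeps the value readable.
# The expected strings assume an untranslated locale, since the formats pass
# through gettext.
def _example_bytecount():
    assert bytecount(1) == '1 bytes'
    assert bytecount(10000) == '9.77 KB'
    assert bytecount(1 << 30) == '1.00 GB'
    return True
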
def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

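# Editor's illustration (not part of the original module): stripping a URL-ish
# prefix back down to a plain filesystem path.
def _example_drop_scheme():
    assert drop_scheme('file', 'file:///tmp/repo') == '/tmp/repo'
    assert drop_scheme('file', '/tmp/repo') == '/tmp/repo'
    return True
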
def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                try:
                    fd = dev.fileno()
                except AttributeError:
                    continue
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def wrap(line, hangindent, width=78):
    padding = '\n' + ' ' * hangindent
    return padding.join(textwrap.wrap(line, width=width - hangindent))

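# Editor's illustration (not part of the original module): continuation lines
# are indented by hangindent, which is why the wrap width is reduced by it.
def _example_wrap():
    text = wrap('the quick brown fox jumps over the lazy dog', 4, width=20)
    # every line after the first starts with four spaces, and no line exceeds
    # 20 columns once that indent is counted
    return text
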
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

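# Editor's illustration (not part of the original module): flattening an
# iterator of multi-line chunks into individual lines.
def _example_iterlines():
    assert list(iterlines(['a\nb\n', 'c\n'])) == ['a', 'b', 'c']
    return True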