util: kill configparser wrapper
Matt Mackall
r8181:03d93882 default
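The class being deleted (old lines 121-137 in the diff below) was a thin shim over Python's SafeConfigParser. Its own comment lists the two differences it provided: case-sensitive option keys and tolerance for values that are not strings. Once nothing imports the wrapper any longer, both it and the ConfigParser import in util.py can go, which is all this diff does. A minimal sketch of the case-sensitivity point, for illustration only and not part of this changeset (assumes Python 2's ConfigParser and StringIO modules):

    import ConfigParser, StringIO

    text = "[ui]\nUserName = test\n"

    # stock parser: optionxform() folds option names to lower case
    cp = ConfigParser.SafeConfigParser()
    cp.readfp(StringIO.StringIO(text))
    print cp.options('ui')       # ['username']

    # the removed wrapper overrode optionxform() to preserve case
    class configparser(ConfigParser.SafeConfigParser):
        def optionxform(self, optionstr):
            return optionstr

    cp = configparser()
    cp.readfp(StringIO.StringIO(text))
    print cp.options('ui')       # ['UserName']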
@@ -1,1491 +1,1474 @@
1 """
1 """
2 util.py - Mercurial utility functions and platform specfic implementations
2 util.py - Mercurial utility functions and platform specfic implementations
3
3
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 Copyright 2005 K. Thananchayan <thananck@yahoo.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
7
7
8 This software may be used and distributed according to the terms
8 This software may be used and distributed according to the terms
9 of the GNU General Public License, incorporated herein by reference.
9 of the GNU General Public License, incorporated herein by reference.
10
10
11 This contains helper routines that are independent of the SCM core and hide
11 This contains helper routines that are independent of the SCM core and hide
12 platform-specific details from the core.
12 platform-specific details from the core.
13 """
13 """
14
14
15 from i18n import _
15 from i18n import _
16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
16 import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
17 import os, stat, threading, time, calendar, ConfigParser, glob, osutil
17 import os, stat, threading, time, calendar, glob, osutil
18 import imp
18 import imp
19
19
20 # Python compatibility
20 # Python compatibility
21
21
22 _md5 = None
22 _md5 = None
23 def md5(s):
23 def md5(s):
24 global _md5
24 global _md5
25 if _md5 is None:
25 if _md5 is None:
26 try:
26 try:
27 import hashlib
27 import hashlib
28 _md5 = hashlib.md5
28 _md5 = hashlib.md5
29 except ImportError:
29 except ImportError:
30 import md5
30 import md5
31 _md5 = md5.md5
31 _md5 = md5.md5
32 return _md5(s)
32 return _md5(s)
33
33
34 _sha1 = None
34 _sha1 = None
35 def sha1(s):
35 def sha1(s):
36 global _sha1
36 global _sha1
37 if _sha1 is None:
37 if _sha1 is None:
38 try:
38 try:
39 import hashlib
39 import hashlib
40 _sha1 = hashlib.sha1
40 _sha1 = hashlib.sha1
41 except ImportError:
41 except ImportError:
42 import sha
42 import sha
43 _sha1 = sha.sha
43 _sha1 = sha.sha
44 return _sha1(s)
44 return _sha1(s)
45
45
46 try:
46 try:
47 import subprocess
47 import subprocess
48 subprocess.Popen # trigger ImportError early
48 subprocess.Popen # trigger ImportError early
49 closefds = os.name == 'posix'
49 closefds = os.name == 'posix'
50 def popen2(cmd, mode='t', bufsize=-1):
50 def popen2(cmd, mode='t', bufsize=-1):
51 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
51 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
52 close_fds=closefds,
52 close_fds=closefds,
53 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
53 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
54 return p.stdin, p.stdout
54 return p.stdin, p.stdout
55 def popen3(cmd, mode='t', bufsize=-1):
55 def popen3(cmd, mode='t', bufsize=-1):
56 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
56 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
57 close_fds=closefds,
57 close_fds=closefds,
58 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
58 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
59 stderr=subprocess.PIPE)
59 stderr=subprocess.PIPE)
60 return p.stdin, p.stdout, p.stderr
60 return p.stdin, p.stdout, p.stderr
61 def Popen3(cmd, capturestderr=False, bufsize=-1):
61 def Popen3(cmd, capturestderr=False, bufsize=-1):
62 stderr = capturestderr and subprocess.PIPE or None
62 stderr = capturestderr and subprocess.PIPE or None
63 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
63 p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
64 close_fds=closefds,
64 close_fds=closefds,
65 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
65 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
66 stderr=stderr)
66 stderr=stderr)
67 p.fromchild = p.stdout
67 p.fromchild = p.stdout
68 p.tochild = p.stdin
68 p.tochild = p.stdin
69 p.childerr = p.stderr
69 p.childerr = p.stderr
70 return p
70 return p
71 except ImportError:
71 except ImportError:
72 subprocess = None
72 subprocess = None
73 from popen2 import Popen3
73 from popen2 import Popen3
74 popen2 = os.popen2
74 popen2 = os.popen2
75 popen3 = os.popen3
75 popen3 = os.popen3
76
76
77
77
78 def version():
78 def version():
79 """Return version information if available."""
79 """Return version information if available."""
80 try:
80 try:
81 import __version__
81 import __version__
82 return __version__.version
82 return __version__.version
83 except ImportError:
83 except ImportError:
84 return 'unknown'
84 return 'unknown'
85
85
86 # used by parsedate
86 # used by parsedate
87 defaultdateformats = (
87 defaultdateformats = (
88 '%Y-%m-%d %H:%M:%S',
88 '%Y-%m-%d %H:%M:%S',
89 '%Y-%m-%d %I:%M:%S%p',
89 '%Y-%m-%d %I:%M:%S%p',
90 '%Y-%m-%d %H:%M',
90 '%Y-%m-%d %H:%M',
91 '%Y-%m-%d %I:%M%p',
91 '%Y-%m-%d %I:%M%p',
92 '%Y-%m-%d',
92 '%Y-%m-%d',
93 '%m-%d',
93 '%m-%d',
94 '%m/%d',
94 '%m/%d',
95 '%m/%d/%y',
95 '%m/%d/%y',
96 '%m/%d/%Y',
96 '%m/%d/%Y',
97 '%a %b %d %H:%M:%S %Y',
97 '%a %b %d %H:%M:%S %Y',
98 '%a %b %d %I:%M:%S%p %Y',
98 '%a %b %d %I:%M:%S%p %Y',
99 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
99 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
100 '%b %d %H:%M:%S %Y',
100 '%b %d %H:%M:%S %Y',
101 '%b %d %I:%M:%S%p %Y',
101 '%b %d %I:%M:%S%p %Y',
102 '%b %d %H:%M:%S',
102 '%b %d %H:%M:%S',
103 '%b %d %I:%M:%S%p',
103 '%b %d %I:%M:%S%p',
104 '%b %d %H:%M',
104 '%b %d %H:%M',
105 '%b %d %I:%M%p',
105 '%b %d %I:%M%p',
106 '%b %d %Y',
106 '%b %d %Y',
107 '%b %d',
107 '%b %d',
108 '%H:%M:%S',
108 '%H:%M:%S',
109 '%I:%M:%SP',
109 '%I:%M:%SP',
110 '%H:%M',
110 '%H:%M',
111 '%I:%M%p',
111 '%I:%M%p',
112 )
112 )
113
113
114 extendeddateformats = defaultdateformats + (
114 extendeddateformats = defaultdateformats + (
115 "%Y",
115 "%Y",
116 "%Y-%m",
116 "%Y-%m",
117 "%b",
117 "%b",
118 "%b %Y",
118 "%b %Y",
119 )
119 )
120
120
121 # differences from SafeConfigParser:
122 # - case-sensitive keys
123 # - allows values that are not strings (this means that you may not
124 # be able to save the configuration to a file)
125 class configparser(ConfigParser.SafeConfigParser):
126 def optionxform(self, optionstr):
127 return optionstr
128
129 def set(self, section, option, value):
130 return ConfigParser.ConfigParser.set(self, section, option, value)
131
132 def _interpolate(self, section, option, rawval, vars):
133 if not isinstance(rawval, basestring):
134 return rawval
135 return ConfigParser.SafeConfigParser._interpolate(self, section,
136 option, rawval, vars)
137
138 def cachefunc(func):
121 def cachefunc(func):
139 '''cache the result of function calls'''
122 '''cache the result of function calls'''
140 # XXX doesn't handle keywords args
123 # XXX doesn't handle keywords args
141 cache = {}
124 cache = {}
142 if func.func_code.co_argcount == 1:
125 if func.func_code.co_argcount == 1:
143 # we gain a small amount of time because
126 # we gain a small amount of time because
144 # we don't need to pack/unpack the list
127 # we don't need to pack/unpack the list
145 def f(arg):
128 def f(arg):
146 if arg not in cache:
129 if arg not in cache:
147 cache[arg] = func(arg)
130 cache[arg] = func(arg)
148 return cache[arg]
131 return cache[arg]
149 else:
132 else:
150 def f(*args):
133 def f(*args):
151 if args not in cache:
134 if args not in cache:
152 cache[args] = func(*args)
135 cache[args] = func(*args)
153 return cache[args]
136 return cache[args]
154
137
155 return f
138 return f
156
139
157 def pipefilter(s, cmd):
140 def pipefilter(s, cmd):
158 '''filter string S through command CMD, returning its output'''
141 '''filter string S through command CMD, returning its output'''
159 (pin, pout) = popen2(cmd, 'b')
142 (pin, pout) = popen2(cmd, 'b')
160 def writer():
143 def writer():
161 try:
144 try:
162 pin.write(s)
145 pin.write(s)
163 pin.close()
146 pin.close()
164 except IOError, inst:
147 except IOError, inst:
165 if inst.errno != errno.EPIPE:
148 if inst.errno != errno.EPIPE:
166 raise
149 raise
167
150
168 # we should use select instead on UNIX, but this will work on most
151 # we should use select instead on UNIX, but this will work on most
169 # systems, including Windows
152 # systems, including Windows
170 w = threading.Thread(target=writer)
153 w = threading.Thread(target=writer)
171 w.start()
154 w.start()
172 f = pout.read()
155 f = pout.read()
173 pout.close()
156 pout.close()
174 w.join()
157 w.join()
175 return f
158 return f
176
159
177 def tempfilter(s, cmd):
160 def tempfilter(s, cmd):
178 '''filter string S through a pair of temporary files with CMD.
161 '''filter string S through a pair of temporary files with CMD.
179 CMD is used as a template to create the real command to be run,
162 CMD is used as a template to create the real command to be run,
180 with the strings INFILE and OUTFILE replaced by the real names of
163 with the strings INFILE and OUTFILE replaced by the real names of
181 the temporary files generated.'''
164 the temporary files generated.'''
182 inname, outname = None, None
165 inname, outname = None, None
183 try:
166 try:
184 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
167 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
185 fp = os.fdopen(infd, 'wb')
168 fp = os.fdopen(infd, 'wb')
186 fp.write(s)
169 fp.write(s)
187 fp.close()
170 fp.close()
188 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
171 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
189 os.close(outfd)
172 os.close(outfd)
190 cmd = cmd.replace('INFILE', inname)
173 cmd = cmd.replace('INFILE', inname)
191 cmd = cmd.replace('OUTFILE', outname)
174 cmd = cmd.replace('OUTFILE', outname)
192 code = os.system(cmd)
175 code = os.system(cmd)
193 if sys.platform == 'OpenVMS' and code & 1:
176 if sys.platform == 'OpenVMS' and code & 1:
194 code = 0
177 code = 0
195 if code: raise Abort(_("command '%s' failed: %s") %
178 if code: raise Abort(_("command '%s' failed: %s") %
196 (cmd, explain_exit(code)))
179 (cmd, explain_exit(code)))
197 return open(outname, 'rb').read()
180 return open(outname, 'rb').read()
198 finally:
181 finally:
199 try:
182 try:
200 if inname: os.unlink(inname)
183 if inname: os.unlink(inname)
201 except: pass
184 except: pass
202 try:
185 try:
203 if outname: os.unlink(outname)
186 if outname: os.unlink(outname)
204 except: pass
187 except: pass
205
188
206 filtertable = {
189 filtertable = {
207 'tempfile:': tempfilter,
190 'tempfile:': tempfilter,
208 'pipe:': pipefilter,
191 'pipe:': pipefilter,
209 }
192 }
210
193
211 def filter(s, cmd):
194 def filter(s, cmd):
212 "filter a string through a command that transforms its input to its output"
195 "filter a string through a command that transforms its input to its output"
213 for name, fn in filtertable.iteritems():
196 for name, fn in filtertable.iteritems():
214 if cmd.startswith(name):
197 if cmd.startswith(name):
215 return fn(s, cmd[len(name):].lstrip())
198 return fn(s, cmd[len(name):].lstrip())
216 return pipefilter(s, cmd)
199 return pipefilter(s, cmd)
217
200
218 def binary(s):
201 def binary(s):
219 """return true if a string is binary data"""
202 """return true if a string is binary data"""
220 return bool(s and '\0' in s)
203 return bool(s and '\0' in s)
221
204
222 def sort(l):
205 def sort(l):
223 if not isinstance(l, list):
206 if not isinstance(l, list):
224 l = list(l)
207 l = list(l)
225 l.sort()
208 l.sort()
226 return l
209 return l
227
210
228 def increasingchunks(source, min=1024, max=65536):
211 def increasingchunks(source, min=1024, max=65536):
229 '''return no less than min bytes per chunk while data remains,
212 '''return no less than min bytes per chunk while data remains,
230 doubling min after each chunk until it reaches max'''
213 doubling min after each chunk until it reaches max'''
231 def log2(x):
214 def log2(x):
232 if not x:
215 if not x:
233 return 0
216 return 0
234 i = 0
217 i = 0
235 while x:
218 while x:
236 x >>= 1
219 x >>= 1
237 i += 1
220 i += 1
238 return i - 1
221 return i - 1
239
222
240 buf = []
223 buf = []
241 blen = 0
224 blen = 0
242 for chunk in source:
225 for chunk in source:
243 buf.append(chunk)
226 buf.append(chunk)
244 blen += len(chunk)
227 blen += len(chunk)
245 if blen >= min:
228 if blen >= min:
246 if min < max:
229 if min < max:
247 min = min << 1
230 min = min << 1
248 nmin = 1 << log2(blen)
231 nmin = 1 << log2(blen)
249 if nmin > min:
232 if nmin > min:
250 min = nmin
233 min = nmin
251 if min > max:
234 if min > max:
252 min = max
235 min = max
253 yield ''.join(buf)
236 yield ''.join(buf)
254 blen = 0
237 blen = 0
255 buf = []
238 buf = []
256 if buf:
239 if buf:
257 yield ''.join(buf)
240 yield ''.join(buf)
258
241
259 Abort = error.Abort
242 Abort = error.Abort
260
243
261 def always(fn): return True
244 def always(fn): return True
262 def never(fn): return False
245 def never(fn): return False
263
246
264 def patkind(name, default):
247 def patkind(name, default):
265 """Split a string into an optional pattern kind prefix and the
248 """Split a string into an optional pattern kind prefix and the
266 actual pattern."""
249 actual pattern."""
267 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
250 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
268 if name.startswith(prefix + ':'): return name.split(':', 1)
251 if name.startswith(prefix + ':'): return name.split(':', 1)
269 return default, name
252 return default, name
270
253
271 def globre(pat, head='^', tail='$'):
254 def globre(pat, head='^', tail='$'):
272 "convert a glob pattern into a regexp"
255 "convert a glob pattern into a regexp"
273 i, n = 0, len(pat)
256 i, n = 0, len(pat)
274 res = ''
257 res = ''
275 group = 0
258 group = 0
276 def peek(): return i < n and pat[i]
259 def peek(): return i < n and pat[i]
277 while i < n:
260 while i < n:
278 c = pat[i]
261 c = pat[i]
279 i = i+1
262 i = i+1
280 if c == '*':
263 if c == '*':
281 if peek() == '*':
264 if peek() == '*':
282 i += 1
265 i += 1
283 res += '.*'
266 res += '.*'
284 else:
267 else:
285 res += '[^/]*'
268 res += '[^/]*'
286 elif c == '?':
269 elif c == '?':
287 res += '.'
270 res += '.'
288 elif c == '[':
271 elif c == '[':
289 j = i
272 j = i
290 if j < n and pat[j] in '!]':
273 if j < n and pat[j] in '!]':
291 j += 1
274 j += 1
292 while j < n and pat[j] != ']':
275 while j < n and pat[j] != ']':
293 j += 1
276 j += 1
294 if j >= n:
277 if j >= n:
295 res += '\\['
278 res += '\\['
296 else:
279 else:
297 stuff = pat[i:j].replace('\\','\\\\')
280 stuff = pat[i:j].replace('\\','\\\\')
298 i = j + 1
281 i = j + 1
299 if stuff[0] == '!':
282 if stuff[0] == '!':
300 stuff = '^' + stuff[1:]
283 stuff = '^' + stuff[1:]
301 elif stuff[0] == '^':
284 elif stuff[0] == '^':
302 stuff = '\\' + stuff
285 stuff = '\\' + stuff
303 res = '%s[%s]' % (res, stuff)
286 res = '%s[%s]' % (res, stuff)
304 elif c == '{':
287 elif c == '{':
305 group += 1
288 group += 1
306 res += '(?:'
289 res += '(?:'
307 elif c == '}' and group:
290 elif c == '}' and group:
308 res += ')'
291 res += ')'
309 group -= 1
292 group -= 1
310 elif c == ',' and group:
293 elif c == ',' and group:
311 res += '|'
294 res += '|'
312 elif c == '\\':
295 elif c == '\\':
313 p = peek()
296 p = peek()
314 if p:
297 if p:
315 i += 1
298 i += 1
316 res += re.escape(p)
299 res += re.escape(p)
317 else:
300 else:
318 res += re.escape(c)
301 res += re.escape(c)
319 else:
302 else:
320 res += re.escape(c)
303 res += re.escape(c)
321 return head + res + tail
304 return head + res + tail
322
305
323 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
306 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
324
307
325 def pathto(root, n1, n2):
308 def pathto(root, n1, n2):
326 '''return the relative path from one place to another.
309 '''return the relative path from one place to another.
327 root should use os.sep to separate directories
310 root should use os.sep to separate directories
328 n1 should use os.sep to separate directories
311 n1 should use os.sep to separate directories
329 n2 should use "/" to separate directories
312 n2 should use "/" to separate directories
330 returns an os.sep-separated path.
313 returns an os.sep-separated path.
331
314
332 If n1 is a relative path, it's assumed it's
315 If n1 is a relative path, it's assumed it's
333 relative to root.
316 relative to root.
334 n2 should always be relative to root.
317 n2 should always be relative to root.
335 '''
318 '''
336 if not n1: return localpath(n2)
319 if not n1: return localpath(n2)
337 if os.path.isabs(n1):
320 if os.path.isabs(n1):
338 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
321 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
339 return os.path.join(root, localpath(n2))
322 return os.path.join(root, localpath(n2))
340 n2 = '/'.join((pconvert(root), n2))
323 n2 = '/'.join((pconvert(root), n2))
341 a, b = splitpath(n1), n2.split('/')
324 a, b = splitpath(n1), n2.split('/')
342 a.reverse()
325 a.reverse()
343 b.reverse()
326 b.reverse()
344 while a and b and a[-1] == b[-1]:
327 while a and b and a[-1] == b[-1]:
345 a.pop()
328 a.pop()
346 b.pop()
329 b.pop()
347 b.reverse()
330 b.reverse()
348 return os.sep.join((['..'] * len(a)) + b) or '.'
331 return os.sep.join((['..'] * len(a)) + b) or '.'
349
332
350 def canonpath(root, cwd, myname):
333 def canonpath(root, cwd, myname):
351 """return the canonical path of myname, given cwd and root"""
334 """return the canonical path of myname, given cwd and root"""
352 if root == os.sep:
335 if root == os.sep:
353 rootsep = os.sep
336 rootsep = os.sep
354 elif endswithsep(root):
337 elif endswithsep(root):
355 rootsep = root
338 rootsep = root
356 else:
339 else:
357 rootsep = root + os.sep
340 rootsep = root + os.sep
358 name = myname
341 name = myname
359 if not os.path.isabs(name):
342 if not os.path.isabs(name):
360 name = os.path.join(root, cwd, name)
343 name = os.path.join(root, cwd, name)
361 name = os.path.normpath(name)
344 name = os.path.normpath(name)
362 audit_path = path_auditor(root)
345 audit_path = path_auditor(root)
363 if name != rootsep and name.startswith(rootsep):
346 if name != rootsep and name.startswith(rootsep):
364 name = name[len(rootsep):]
347 name = name[len(rootsep):]
365 audit_path(name)
348 audit_path(name)
366 return pconvert(name)
349 return pconvert(name)
367 elif name == root:
350 elif name == root:
368 return ''
351 return ''
369 else:
352 else:
370 # Determine whether `name' is in the hierarchy at or beneath `root',
353 # Determine whether `name' is in the hierarchy at or beneath `root',
371 # by iterating name=dirname(name) until that causes no change (can't
354 # by iterating name=dirname(name) until that causes no change (can't
372 # check name == '/', because that doesn't work on windows). For each
355 # check name == '/', because that doesn't work on windows). For each
373 # `name', compare dev/inode numbers. If they match, the list `rel'
356 # `name', compare dev/inode numbers. If they match, the list `rel'
374 # holds the reversed list of components making up the relative file
357 # holds the reversed list of components making up the relative file
375 # name we want.
358 # name we want.
376 root_st = os.stat(root)
359 root_st = os.stat(root)
377 rel = []
360 rel = []
378 while True:
361 while True:
379 try:
362 try:
380 name_st = os.stat(name)
363 name_st = os.stat(name)
381 except OSError:
364 except OSError:
382 break
365 break
383 if samestat(name_st, root_st):
366 if samestat(name_st, root_st):
384 if not rel:
367 if not rel:
385 # name was actually the same as root (maybe a symlink)
368 # name was actually the same as root (maybe a symlink)
386 return ''
369 return ''
387 rel.reverse()
370 rel.reverse()
388 name = os.path.join(*rel)
371 name = os.path.join(*rel)
389 audit_path(name)
372 audit_path(name)
390 return pconvert(name)
373 return pconvert(name)
391 dirname, basename = os.path.split(name)
374 dirname, basename = os.path.split(name)
392 rel.append(basename)
375 rel.append(basename)
393 if dirname == name:
376 if dirname == name:
394 break
377 break
395 name = dirname
378 name = dirname
396
379
397 raise Abort('%s not under root' % myname)
380 raise Abort('%s not under root' % myname)
398
381
399 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
382 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
400 """build a function to match a set of file patterns
383 """build a function to match a set of file patterns
401
384
402 arguments:
385 arguments:
403 canonroot - the canonical root of the tree you're matching against
386 canonroot - the canonical root of the tree you're matching against
404 cwd - the current working directory, if relevant
387 cwd - the current working directory, if relevant
405 names - patterns to find
388 names - patterns to find
406 inc - patterns to include
389 inc - patterns to include
407 exc - patterns to exclude
390 exc - patterns to exclude
408 dflt_pat - if a pattern in names has no explicit type, assume this one
391 dflt_pat - if a pattern in names has no explicit type, assume this one
409 src - where these patterns came from (e.g. .hgignore)
392 src - where these patterns came from (e.g. .hgignore)
410
393
411 a pattern is one of:
394 a pattern is one of:
412 'glob:<glob>' - a glob relative to cwd
395 'glob:<glob>' - a glob relative to cwd
413 're:<regexp>' - a regular expression
396 're:<regexp>' - a regular expression
414 'path:<path>' - a path relative to canonroot
397 'path:<path>' - a path relative to canonroot
415 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
398 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
416 'relpath:<path>' - a path relative to cwd
399 'relpath:<path>' - a path relative to cwd
417 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
400 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
418 '<something>' - one of the cases above, selected by the dflt_pat argument
401 '<something>' - one of the cases above, selected by the dflt_pat argument
419
402
420 returns:
403 returns:
421 a 3-tuple containing
404 a 3-tuple containing
422 - list of roots (places where one should start a recursive walk of the fs);
405 - list of roots (places where one should start a recursive walk of the fs);
423 this often matches the explicit non-pattern names passed in, but also
406 this often matches the explicit non-pattern names passed in, but also
424 includes the initial part of glob: patterns that has no glob characters
407 includes the initial part of glob: patterns that has no glob characters
425 - a bool match(filename) function
408 - a bool match(filename) function
426 - a bool indicating if any patterns were passed in
409 - a bool indicating if any patterns were passed in
427 """
410 """
428
411
429 # a common case: no patterns at all
412 # a common case: no patterns at all
430 if not names and not inc and not exc:
413 if not names and not inc and not exc:
431 return [], always, False
414 return [], always, False
432
415
433 def contains_glob(name):
416 def contains_glob(name):
434 for c in name:
417 for c in name:
435 if c in _globchars: return True
418 if c in _globchars: return True
436 return False
419 return False
437
420
438 def regex(kind, name, tail):
421 def regex(kind, name, tail):
439 '''convert a pattern into a regular expression'''
422 '''convert a pattern into a regular expression'''
440 if not name:
423 if not name:
441 return ''
424 return ''
442 if kind == 're':
425 if kind == 're':
443 return name
426 return name
444 elif kind == 'path':
427 elif kind == 'path':
445 return '^' + re.escape(name) + '(?:/|$)'
428 return '^' + re.escape(name) + '(?:/|$)'
446 elif kind == 'relglob':
429 elif kind == 'relglob':
447 return globre(name, '(?:|.*/)', tail)
430 return globre(name, '(?:|.*/)', tail)
448 elif kind == 'relpath':
431 elif kind == 'relpath':
449 return re.escape(name) + '(?:/|$)'
432 return re.escape(name) + '(?:/|$)'
450 elif kind == 'relre':
433 elif kind == 'relre':
451 if name.startswith('^'):
434 if name.startswith('^'):
452 return name
435 return name
453 return '.*' + name
436 return '.*' + name
454 return globre(name, '', tail)
437 return globre(name, '', tail)
455
438
456 def matchfn(pats, tail):
439 def matchfn(pats, tail):
457 """build a matching function from a set of patterns"""
440 """build a matching function from a set of patterns"""
458 if not pats:
441 if not pats:
459 return
442 return
460 try:
443 try:
461 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
444 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
462 if len(pat) > 20000:
445 if len(pat) > 20000:
463 raise OverflowError()
446 raise OverflowError()
464 return re.compile(pat).match
447 return re.compile(pat).match
465 except OverflowError:
448 except OverflowError:
466 # We're using a Python with a tiny regex engine and we
449 # We're using a Python with a tiny regex engine and we
467 # made it explode, so we'll divide the pattern list in two
450 # made it explode, so we'll divide the pattern list in two
468 # until it works
451 # until it works
469 l = len(pats)
452 l = len(pats)
470 if l < 2:
453 if l < 2:
471 raise
454 raise
472 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
455 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
473 return lambda s: a(s) or b(s)
456 return lambda s: a(s) or b(s)
474 except re.error:
457 except re.error:
475 for k, p in pats:
458 for k, p in pats:
476 try:
459 try:
477 re.compile('(?:%s)' % regex(k, p, tail))
460 re.compile('(?:%s)' % regex(k, p, tail))
478 except re.error:
461 except re.error:
479 if src:
462 if src:
480 raise Abort("%s: invalid pattern (%s): %s" %
463 raise Abort("%s: invalid pattern (%s): %s" %
481 (src, k, p))
464 (src, k, p))
482 else:
465 else:
483 raise Abort("invalid pattern (%s): %s" % (k, p))
466 raise Abort("invalid pattern (%s): %s" % (k, p))
484 raise Abort("invalid pattern")
467 raise Abort("invalid pattern")
485
468
486 def globprefix(pat):
469 def globprefix(pat):
487 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
470 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
488 root = []
471 root = []
489 for p in pat.split('/'):
472 for p in pat.split('/'):
490 if contains_glob(p): break
473 if contains_glob(p): break
491 root.append(p)
474 root.append(p)
492 return '/'.join(root) or '.'
475 return '/'.join(root) or '.'
493
476
494 def normalizepats(names, default):
477 def normalizepats(names, default):
495 pats = []
478 pats = []
496 roots = []
479 roots = []
497 anypats = False
480 anypats = False
498 for kind, name in [patkind(p, default) for p in names]:
481 for kind, name in [patkind(p, default) for p in names]:
499 if kind in ('glob', 'relpath'):
482 if kind in ('glob', 'relpath'):
500 name = canonpath(canonroot, cwd, name)
483 name = canonpath(canonroot, cwd, name)
501 elif kind in ('relglob', 'path'):
484 elif kind in ('relglob', 'path'):
502 name = normpath(name)
485 name = normpath(name)
503
486
504 pats.append((kind, name))
487 pats.append((kind, name))
505
488
506 if kind in ('glob', 're', 'relglob', 'relre'):
489 if kind in ('glob', 're', 'relglob', 'relre'):
507 anypats = True
490 anypats = True
508
491
509 if kind == 'glob':
492 if kind == 'glob':
510 root = globprefix(name)
493 root = globprefix(name)
511 roots.append(root)
494 roots.append(root)
512 elif kind in ('relpath', 'path'):
495 elif kind in ('relpath', 'path'):
513 roots.append(name or '.')
496 roots.append(name or '.')
514 elif kind == 'relglob':
497 elif kind == 'relglob':
515 roots.append('.')
498 roots.append('.')
516 return roots, pats, anypats
499 return roots, pats, anypats
517
500
518 roots, pats, anypats = normalizepats(names, dflt_pat)
501 roots, pats, anypats = normalizepats(names, dflt_pat)
519
502
520 patmatch = matchfn(pats, '$') or always
503 patmatch = matchfn(pats, '$') or always
521 incmatch = always
504 incmatch = always
522 if inc:
505 if inc:
523 dummy, inckinds, dummy = normalizepats(inc, 'glob')
506 dummy, inckinds, dummy = normalizepats(inc, 'glob')
524 incmatch = matchfn(inckinds, '(?:/|$)')
507 incmatch = matchfn(inckinds, '(?:/|$)')
525 excmatch = never
508 excmatch = never
526 if exc:
509 if exc:
527 dummy, exckinds, dummy = normalizepats(exc, 'glob')
510 dummy, exckinds, dummy = normalizepats(exc, 'glob')
528 excmatch = matchfn(exckinds, '(?:/|$)')
511 excmatch = matchfn(exckinds, '(?:/|$)')
529
512
530 if not names and inc and not exc:
513 if not names and inc and not exc:
531 # common case: hgignore patterns
514 # common case: hgignore patterns
532 match = incmatch
515 match = incmatch
533 else:
516 else:
534 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
517 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
535
518
536 return (roots, match, (inc or exc or anypats) and True)
519 return (roots, match, (inc or exc or anypats) and True)
537
520
538 _hgexecutable = None
521 _hgexecutable = None
539
522
540 def main_is_frozen():
523 def main_is_frozen():
541 """return True if we are a frozen executable.
524 """return True if we are a frozen executable.
542
525
543 The code supports py2exe (most common, Windows only) and tools/freeze
526 The code supports py2exe (most common, Windows only) and tools/freeze
544 (portable, not much used).
527 (portable, not much used).
545 """
528 """
546 return (hasattr(sys, "frozen") or # new py2exe
529 return (hasattr(sys, "frozen") or # new py2exe
547 hasattr(sys, "importers") or # old py2exe
530 hasattr(sys, "importers") or # old py2exe
548 imp.is_frozen("__main__")) # tools/freeze
531 imp.is_frozen("__main__")) # tools/freeze
549
532
550 def hgexecutable():
533 def hgexecutable():
551 """return location of the 'hg' executable.
534 """return location of the 'hg' executable.
552
535
553 Defaults to $HG or 'hg' in the search path.
536 Defaults to $HG or 'hg' in the search path.
554 """
537 """
555 if _hgexecutable is None:
538 if _hgexecutable is None:
556 hg = os.environ.get('HG')
539 hg = os.environ.get('HG')
557 if hg:
540 if hg:
558 set_hgexecutable(hg)
541 set_hgexecutable(hg)
559 elif main_is_frozen():
542 elif main_is_frozen():
560 set_hgexecutable(sys.executable)
543 set_hgexecutable(sys.executable)
561 else:
544 else:
562 set_hgexecutable(find_exe('hg') or 'hg')
545 set_hgexecutable(find_exe('hg') or 'hg')
563 return _hgexecutable
546 return _hgexecutable
564
547
565 def set_hgexecutable(path):
548 def set_hgexecutable(path):
566 """set location of the 'hg' executable"""
549 """set location of the 'hg' executable"""
567 global _hgexecutable
550 global _hgexecutable
568 _hgexecutable = path
551 _hgexecutable = path
569
552
570 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
553 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
571 '''enhanced shell command execution.
554 '''enhanced shell command execution.
572 run with environment maybe modified, maybe in different dir.
555 run with environment maybe modified, maybe in different dir.
573
556
574 if command fails and onerr is None, return status. if ui object,
557 if command fails and onerr is None, return status. if ui object,
575 print error message and return status, else raise onerr object as
558 print error message and return status, else raise onerr object as
576 exception.'''
559 exception.'''
577 def py2shell(val):
560 def py2shell(val):
578 'convert python object into string that is useful to shell'
561 'convert python object into string that is useful to shell'
579 if val in (None, False):
562 if val in (None, False):
580 return '0'
563 return '0'
581 if val == True:
564 if val == True:
582 return '1'
565 return '1'
583 return str(val)
566 return str(val)
584 oldenv = {}
567 oldenv = {}
585 for k in environ:
568 for k in environ:
586 oldenv[k] = os.environ.get(k)
569 oldenv[k] = os.environ.get(k)
587 if cwd is not None:
570 if cwd is not None:
588 oldcwd = os.getcwd()
571 oldcwd = os.getcwd()
589 origcmd = cmd
572 origcmd = cmd
590 if os.name == 'nt':
573 if os.name == 'nt':
591 cmd = '"%s"' % cmd
574 cmd = '"%s"' % cmd
592 try:
575 try:
593 for k, v in environ.iteritems():
576 for k, v in environ.iteritems():
594 os.environ[k] = py2shell(v)
577 os.environ[k] = py2shell(v)
595 os.environ['HG'] = hgexecutable()
578 os.environ['HG'] = hgexecutable()
596 if cwd is not None and oldcwd != cwd:
579 if cwd is not None and oldcwd != cwd:
597 os.chdir(cwd)
580 os.chdir(cwd)
598 rc = os.system(cmd)
581 rc = os.system(cmd)
599 if sys.platform == 'OpenVMS' and rc & 1:
582 if sys.platform == 'OpenVMS' and rc & 1:
600 rc = 0
583 rc = 0
601 if rc and onerr:
584 if rc and onerr:
602 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
585 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
603 explain_exit(rc)[0])
586 explain_exit(rc)[0])
604 if errprefix:
587 if errprefix:
605 errmsg = '%s: %s' % (errprefix, errmsg)
588 errmsg = '%s: %s' % (errprefix, errmsg)
606 try:
589 try:
607 onerr.warn(errmsg + '\n')
590 onerr.warn(errmsg + '\n')
608 except AttributeError:
591 except AttributeError:
609 raise onerr(errmsg)
592 raise onerr(errmsg)
610 return rc
593 return rc
611 finally:
594 finally:
612 for k, v in oldenv.iteritems():
595 for k, v in oldenv.iteritems():
613 if v is None:
596 if v is None:
614 del os.environ[k]
597 del os.environ[k]
615 else:
598 else:
616 os.environ[k] = v
599 os.environ[k] = v
617 if cwd is not None and oldcwd != cwd:
600 if cwd is not None and oldcwd != cwd:
618 os.chdir(oldcwd)
601 os.chdir(oldcwd)
619
602
620 def checksignature(func):
603 def checksignature(func):
621 '''wrap a function with code to check for calling errors'''
604 '''wrap a function with code to check for calling errors'''
622 def check(*args, **kwargs):
605 def check(*args, **kwargs):
623 try:
606 try:
624 return func(*args, **kwargs)
607 return func(*args, **kwargs)
625 except TypeError:
608 except TypeError:
626 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
609 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
627 raise error.SignatureError
610 raise error.SignatureError
628 raise
611 raise
629
612
630 return check
613 return check
631
614
632 # os.path.lexists is not available on python2.3
615 # os.path.lexists is not available on python2.3
633 def lexists(filename):
616 def lexists(filename):
634 "test whether a file with this name exists. does not follow symlinks"
617 "test whether a file with this name exists. does not follow symlinks"
635 try:
618 try:
636 os.lstat(filename)
619 os.lstat(filename)
637 except:
620 except:
638 return False
621 return False
639 return True
622 return True
640
623
641 def rename(src, dst):
624 def rename(src, dst):
642 """forcibly rename a file"""
625 """forcibly rename a file"""
643 try:
626 try:
644 os.rename(src, dst)
627 os.rename(src, dst)
645 except OSError, err: # FIXME: check err (EEXIST ?)
628 except OSError, err: # FIXME: check err (EEXIST ?)
646 # on windows, rename to existing file is not allowed, so we
629 # on windows, rename to existing file is not allowed, so we
647 # must delete destination first. but if file is open, unlink
630 # must delete destination first. but if file is open, unlink
648 # schedules it for delete but does not delete it. rename
631 # schedules it for delete but does not delete it. rename
649 # happens immediately even for open files, so we rename
632 # happens immediately even for open files, so we rename
650 # destination to a temporary name, then delete that. then
633 # destination to a temporary name, then delete that. then
651 # rename is safe to do.
634 # rename is safe to do.
652 temp = dst + "-force-rename"
635 temp = dst + "-force-rename"
653 os.rename(dst, temp)
636 os.rename(dst, temp)
654 os.unlink(temp)
637 os.unlink(temp)
655 os.rename(src, dst)
638 os.rename(src, dst)
656
639
657 def unlink(f):
640 def unlink(f):
658 """unlink and remove the directory if it is empty"""
641 """unlink and remove the directory if it is empty"""
659 os.unlink(f)
642 os.unlink(f)
660 # try removing directories that might now be empty
643 # try removing directories that might now be empty
661 try:
644 try:
662 os.removedirs(os.path.dirname(f))
645 os.removedirs(os.path.dirname(f))
663 except OSError:
646 except OSError:
664 pass
647 pass
665
648
666 def copyfile(src, dest):
649 def copyfile(src, dest):
667 "copy a file, preserving mode and atime/mtime"
650 "copy a file, preserving mode and atime/mtime"
668 if os.path.islink(src):
651 if os.path.islink(src):
669 try:
652 try:
670 os.unlink(dest)
653 os.unlink(dest)
671 except:
654 except:
672 pass
655 pass
673 os.symlink(os.readlink(src), dest)
656 os.symlink(os.readlink(src), dest)
674 else:
657 else:
675 try:
658 try:
676 shutil.copyfile(src, dest)
659 shutil.copyfile(src, dest)
677 shutil.copystat(src, dest)
660 shutil.copystat(src, dest)
678 except shutil.Error, inst:
661 except shutil.Error, inst:
679 raise Abort(str(inst))
662 raise Abort(str(inst))
680
663
681 def copyfiles(src, dst, hardlink=None):
664 def copyfiles(src, dst, hardlink=None):
682 """Copy a directory tree using hardlinks if possible"""
665 """Copy a directory tree using hardlinks if possible"""
683
666
684 if hardlink is None:
667 if hardlink is None:
685 hardlink = (os.stat(src).st_dev ==
668 hardlink = (os.stat(src).st_dev ==
686 os.stat(os.path.dirname(dst)).st_dev)
669 os.stat(os.path.dirname(dst)).st_dev)
687
670
688 if os.path.isdir(src):
671 if os.path.isdir(src):
689 os.mkdir(dst)
672 os.mkdir(dst)
690 for name, kind in osutil.listdir(src):
673 for name, kind in osutil.listdir(src):
691 srcname = os.path.join(src, name)
674 srcname = os.path.join(src, name)
692 dstname = os.path.join(dst, name)
675 dstname = os.path.join(dst, name)
693 copyfiles(srcname, dstname, hardlink)
676 copyfiles(srcname, dstname, hardlink)
694 else:
677 else:
695 if hardlink:
678 if hardlink:
696 try:
679 try:
697 os_link(src, dst)
680 os_link(src, dst)
698 except (IOError, OSError):
681 except (IOError, OSError):
699 hardlink = False
682 hardlink = False
700 shutil.copy(src, dst)
683 shutil.copy(src, dst)
701 else:
684 else:
702 shutil.copy(src, dst)
685 shutil.copy(src, dst)
703
686
704 class path_auditor(object):
687 class path_auditor(object):
705 '''ensure that a filesystem path contains no banned components.
688 '''ensure that a filesystem path contains no banned components.
706 the following properties of a path are checked:
689 the following properties of a path are checked:
707
690
708 - under top-level .hg
691 - under top-level .hg
709 - starts at the root of a windows drive
692 - starts at the root of a windows drive
710 - contains ".."
693 - contains ".."
711 - traverses a symlink (e.g. a/symlink_here/b)
694 - traverses a symlink (e.g. a/symlink_here/b)
712 - inside a nested repository'''
695 - inside a nested repository'''
713
696
714 def __init__(self, root):
697 def __init__(self, root):
715 self.audited = set()
698 self.audited = set()
716 self.auditeddir = set()
699 self.auditeddir = set()
717 self.root = root
700 self.root = root
718
701
719 def __call__(self, path):
702 def __call__(self, path):
720 if path in self.audited:
703 if path in self.audited:
721 return
704 return
722 normpath = os.path.normcase(path)
705 normpath = os.path.normcase(path)
723 parts = splitpath(normpath)
706 parts = splitpath(normpath)
724 if (os.path.splitdrive(path)[0]
707 if (os.path.splitdrive(path)[0]
725 or parts[0].lower() in ('.hg', '.hg.', '')
708 or parts[0].lower() in ('.hg', '.hg.', '')
726 or os.pardir in parts):
709 or os.pardir in parts):
727 raise Abort(_("path contains illegal component: %s") % path)
710 raise Abort(_("path contains illegal component: %s") % path)
728 if '.hg' in path.lower():
711 if '.hg' in path.lower():
729 lparts = [p.lower() for p in parts]
712 lparts = [p.lower() for p in parts]
730 for p in '.hg', '.hg.':
713 for p in '.hg', '.hg.':
731 if p in lparts[1:]:
714 if p in lparts[1:]:
732 pos = lparts.index(p)
715 pos = lparts.index(p)
733 base = os.path.join(*parts[:pos])
716 base = os.path.join(*parts[:pos])
734 raise Abort(_('path %r is inside repo %r') % (path, base))
717 raise Abort(_('path %r is inside repo %r') % (path, base))
735 def check(prefix):
718 def check(prefix):
736 curpath = os.path.join(self.root, prefix)
719 curpath = os.path.join(self.root, prefix)
737 try:
720 try:
738 st = os.lstat(curpath)
721 st = os.lstat(curpath)
739 except OSError, err:
722 except OSError, err:
740 # EINVAL can be raised as invalid path syntax under win32.
723 # EINVAL can be raised as invalid path syntax under win32.
741 # They must be ignored for patterns can be checked too.
724 # They must be ignored for patterns can be checked too.
742 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
725 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
743 raise
726 raise
744 else:
727 else:
745 if stat.S_ISLNK(st.st_mode):
728 if stat.S_ISLNK(st.st_mode):
746 raise Abort(_('path %r traverses symbolic link %r') %
729 raise Abort(_('path %r traverses symbolic link %r') %
747 (path, prefix))
730 (path, prefix))
748 elif (stat.S_ISDIR(st.st_mode) and
731 elif (stat.S_ISDIR(st.st_mode) and
749 os.path.isdir(os.path.join(curpath, '.hg'))):
732 os.path.isdir(os.path.join(curpath, '.hg'))):
750 raise Abort(_('path %r is inside repo %r') %
733 raise Abort(_('path %r is inside repo %r') %
751 (path, prefix))
734 (path, prefix))
752 parts.pop()
735 parts.pop()
753 prefixes = []
736 prefixes = []
754 for n in range(len(parts)):
737 for n in range(len(parts)):
755 prefix = os.sep.join(parts)
738 prefix = os.sep.join(parts)
756 if prefix in self.auditeddir:
739 if prefix in self.auditeddir:
757 break
740 break
758 check(prefix)
741 check(prefix)
759 prefixes.append(prefix)
742 prefixes.append(prefix)
760 parts.pop()
743 parts.pop()
761
744
762 self.audited.add(path)
745 self.audited.add(path)
763 # only add prefixes to the cache after checking everything: we don't
746 # only add prefixes to the cache after checking everything: we don't
764 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
747 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
765 self.auditeddir.update(prefixes)
748 self.auditeddir.update(prefixes)
766
749
767 def nlinks(pathname):
750 def nlinks(pathname):
768 """Return number of hardlinks for the given file."""
751 """Return number of hardlinks for the given file."""
769 return os.lstat(pathname).st_nlink
752 return os.lstat(pathname).st_nlink
770
753
771 if hasattr(os, 'link'):
754 if hasattr(os, 'link'):
772 os_link = os.link
755 os_link = os.link
773 else:
756 else:
774 def os_link(src, dst):
757 def os_link(src, dst):
775 raise OSError(0, _("Hardlinks not supported"))
758 raise OSError(0, _("Hardlinks not supported"))
776
759
777 def lookup_reg(key, name=None, scope=None):
760 def lookup_reg(key, name=None, scope=None):
778 return None
761 return None
779
762
780 if os.name == 'nt':
763 if os.name == 'nt':
781 from windows import *
764 from windows import *
782 def expand_glob(pats):
765 def expand_glob(pats):
783 '''On Windows, expand the implicit globs in a list of patterns'''
766 '''On Windows, expand the implicit globs in a list of patterns'''
784 ret = []
767 ret = []
785 for p in pats:
768 for p in pats:
786 kind, name = patkind(p, None)
769 kind, name = patkind(p, None)
787 if kind is None:
770 if kind is None:
788 globbed = glob.glob(name)
771 globbed = glob.glob(name)
789 if globbed:
772 if globbed:
790 ret.extend(globbed)
773 ret.extend(globbed)
791 continue
774 continue
792 # if we couldn't expand the glob, just keep it around
775 # if we couldn't expand the glob, just keep it around
793 ret.append(p)
776 ret.append(p)
794 return ret
777 return ret
795 else:
778 else:
796 from posix import *
779 from posix import *
797
780
798 def makelock(info, pathname):
781 def makelock(info, pathname):
799 try:
782 try:
800 return os.symlink(info, pathname)
783 return os.symlink(info, pathname)
801 except OSError, why:
784 except OSError, why:
802 if why.errno == errno.EEXIST:
785 if why.errno == errno.EEXIST:
803 raise
786 raise
804 except AttributeError: # no symlink in os
787 except AttributeError: # no symlink in os
805 pass
788 pass
806
789
807 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
790 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
808 os.write(ld, info)
791 os.write(ld, info)
809 os.close(ld)
792 os.close(ld)
810
793
811 def readlock(pathname):
794 def readlock(pathname):
812 try:
795 try:
813 return os.readlink(pathname)
796 return os.readlink(pathname)
814 except OSError, why:
797 except OSError, why:
815 if why.errno not in (errno.EINVAL, errno.ENOSYS):
798 if why.errno not in (errno.EINVAL, errno.ENOSYS):
816 raise
799 raise
817 except AttributeError: # no symlink in os
800 except AttributeError: # no symlink in os
818 pass
801 pass
819 return posixfile(pathname).read()
802 return posixfile(pathname).read()
820
803
821 def fstat(fp):
804 def fstat(fp):
822 '''stat file object that may not have fileno method.'''
805 '''stat file object that may not have fileno method.'''
823 try:
806 try:
824 return os.fstat(fp.fileno())
807 return os.fstat(fp.fileno())
825 except AttributeError:
808 except AttributeError:
826 return os.stat(fp.name)
809 return os.stat(fp.name)
827
810
828 # File system features
811 # File system features
829
812
830 def checkcase(path):
813 def checkcase(path):
831 """
814 """
832 Check whether the given path is on a case-sensitive filesystem
815 Check whether the given path is on a case-sensitive filesystem
833
816
834 Requires a path (like /foo/.hg) ending with a foldable final
817 Requires a path (like /foo/.hg) ending with a foldable final
835 directory component.
818 directory component.
836 """
819 """
837 s1 = os.stat(path)
820 s1 = os.stat(path)
838 d, b = os.path.split(path)
821 d, b = os.path.split(path)
839 p2 = os.path.join(d, b.upper())
822 p2 = os.path.join(d, b.upper())
840 if path == p2:
823 if path == p2:
841 p2 = os.path.join(d, b.lower())
824 p2 = os.path.join(d, b.lower())
842 try:
825 try:
843 s2 = os.stat(p2)
826 s2 = os.stat(p2)
844 if s2 == s1:
827 if s2 == s1:
845 return False
828 return False
846 return True
829 return True
847 except:
830 except:
848 return True
831 return True
849
832
850 _fspathcache = {}
833 _fspathcache = {}
851 def fspath(name, root):
834 def fspath(name, root):
852 '''Get name in the case stored in the filesystem
835 '''Get name in the case stored in the filesystem
853
836
854 The name is either relative to root, or it is an absolute path starting
837 The name is either relative to root, or it is an absolute path starting
855 with root. Note that this function is unnecessary, and should not be
838 with root. Note that this function is unnecessary, and should not be
856 called, for case-sensitive filesystems (simply because it's expensive).
839 called, for case-sensitive filesystems (simply because it's expensive).
857 '''
840 '''
858 # If name is absolute, make it relative
841 # If name is absolute, make it relative
859 if name.lower().startswith(root.lower()):
842 if name.lower().startswith(root.lower()):
860 l = len(root)
843 l = len(root)
861 if name[l] == os.sep or name[l] == os.altsep:
844 if name[l] == os.sep or name[l] == os.altsep:
862 l = l + 1
845 l = l + 1
863 name = name[l:]
846 name = name[l:]
864
847
865 if not os.path.exists(os.path.join(root, name)):
848 if not os.path.exists(os.path.join(root, name)):
866 return None
849 return None
867
850
868 seps = os.sep
851 seps = os.sep
869 if os.altsep:
852 if os.altsep:
870 seps = seps + os.altsep
853 seps = seps + os.altsep
871 # Protect backslashes. This gets silly very quickly.
854 # Protect backslashes. This gets silly very quickly.
872 seps.replace('\\','\\\\')
855 seps.replace('\\','\\\\')
873 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
856 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
874 dir = os.path.normcase(os.path.normpath(root))
857 dir = os.path.normcase(os.path.normpath(root))
875 result = []
858 result = []
876 for part, sep in pattern.findall(name):
859 for part, sep in pattern.findall(name):
877 if sep:
860 if sep:
878 result.append(sep)
861 result.append(sep)
879 continue
862 continue
880
863
881 if dir not in _fspathcache:
864 if dir not in _fspathcache:
882 _fspathcache[dir] = os.listdir(dir)
865 _fspathcache[dir] = os.listdir(dir)
883 contents = _fspathcache[dir]
866 contents = _fspathcache[dir]
884
867
885 lpart = part.lower()
868 lpart = part.lower()
886 for n in contents:
869 for n in contents:
887 if n.lower() == lpart:
870 if n.lower() == lpart:
888 result.append(n)
871 result.append(n)
889 break
872 break
890 else:
873 else:
891 # Cannot happen, as the file exists!
874 # Cannot happen, as the file exists!
892 result.append(part)
875 result.append(part)
893 dir = os.path.join(dir, lpart)
876 dir = os.path.join(dir, lpart)
894
877
895 return ''.join(result)
878 return ''.join(result)
896
879
897 def checkexec(path):
880 def checkexec(path):
898 """
881 """
899 Check whether the given path is on a filesystem with UNIX-like exec flags
882 Check whether the given path is on a filesystem with UNIX-like exec flags
900
883
901 Requires a directory (like /foo/.hg)
884 Requires a directory (like /foo/.hg)
902 """
885 """
903
886
904 # VFAT on some Linux versions can flip mode but it doesn't persist
887 # VFAT on some Linux versions can flip mode but it doesn't persist
905 # a FS remount. Frequently we can detect it if files are created
888 # a FS remount. Frequently we can detect it if files are created
906 # with exec bit on.
889 # with exec bit on.
907
890
908 try:
891 try:
909 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
892 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
910 fh, fn = tempfile.mkstemp("", "", path)
893 fh, fn = tempfile.mkstemp("", "", path)
911 try:
894 try:
912 os.close(fh)
895 os.close(fh)
913 m = os.stat(fn).st_mode & 0777
896 m = os.stat(fn).st_mode & 0777
914 new_file_has_exec = m & EXECFLAGS
897 new_file_has_exec = m & EXECFLAGS
915 os.chmod(fn, m ^ EXECFLAGS)
898 os.chmod(fn, m ^ EXECFLAGS)
916 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
899 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
917 finally:
900 finally:
918 os.unlink(fn)
901 os.unlink(fn)
919 except (IOError, OSError):
902 except (IOError, OSError):
920 # we don't care, the user probably won't be able to commit anyway
903 # we don't care, the user probably won't be able to commit anyway
921 return False
904 return False
922 return not (new_file_has_exec or exec_flags_cannot_flip)
905 return not (new_file_has_exec or exec_flags_cannot_flip)
923
906
924 def checklink(path):
907 def checklink(path):
925 """check whether the given path is on a symlink-capable filesystem"""
908 """check whether the given path is on a symlink-capable filesystem"""
926 # mktemp is not racy because symlink creation will fail if the
909 # mktemp is not racy because symlink creation will fail if the
927 # file already exists
910 # file already exists
928 name = tempfile.mktemp(dir=path)
911 name = tempfile.mktemp(dir=path)
929 try:
912 try:
930 os.symlink(".", name)
913 os.symlink(".", name)
931 os.unlink(name)
914 os.unlink(name)
932 return True
915 return True
933 except (OSError, AttributeError):
916 except (OSError, AttributeError):
934 return False
917 return False
935
918
936 def needbinarypatch():
919 def needbinarypatch():
937 """return True if patches should be applied in binary mode by default."""
920 """return True if patches should be applied in binary mode by default."""
938 return os.name == 'nt'
921 return os.name == 'nt'
939
922
940 def endswithsep(path):
923 def endswithsep(path):
941 '''Check path ends with os.sep or os.altsep.'''
924 '''Check path ends with os.sep or os.altsep.'''
942 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
925 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
943
926
944 def splitpath(path):
927 def splitpath(path):
945 '''Split path by os.sep.
928 '''Split path by os.sep.
946 Note that this function does not use os.altsep because this is
929 Note that this function does not use os.altsep because this is
947 an alternative of simple "xxx.split(os.sep)".
930 an alternative of simple "xxx.split(os.sep)".
948 It is recommended to use os.path.normpath() before using this
931 It is recommended to use os.path.normpath() before using this
949 function if need.'''
932 function if need.'''
950 return path.split(os.sep)
933 return path.split(os.sep)
951
934
952 def gui():
935 def gui():
953 '''Are we running in a GUI?'''
936 '''Are we running in a GUI?'''
954 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
937 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
955
938
956 def mktempcopy(name, emptyok=False, createmode=None):
939 def mktempcopy(name, emptyok=False, createmode=None):
957 """Create a temporary file with the same contents from name
940 """Create a temporary file with the same contents from name
958
941
959 The permission bits are copied from the original file.
942 The permission bits are copied from the original file.
960
943
961 If the temporary file is going to be truncated immediately, you
944 If the temporary file is going to be truncated immediately, you
962 can use emptyok=True as an optimization.
945 can use emptyok=True as an optimization.
963
946
964 Returns the name of the temporary file.
947 Returns the name of the temporary file.
965 """
948 """
966 d, fn = os.path.split(name)
949 d, fn = os.path.split(name)
967 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
950 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
968 os.close(fd)
951 os.close(fd)
969 # Temporary files are created with mode 0600, which is usually not
952 # Temporary files are created with mode 0600, which is usually not
970 # what we want. If the original file already exists, just copy
953 # what we want. If the original file already exists, just copy
971 # its mode. Otherwise, manually obey umask.
954 # its mode. Otherwise, manually obey umask.
972 try:
955 try:
973 st_mode = os.lstat(name).st_mode & 0777
956 st_mode = os.lstat(name).st_mode & 0777
974 except OSError, inst:
957 except OSError, inst:
975 if inst.errno != errno.ENOENT:
958 if inst.errno != errno.ENOENT:
976 raise
959 raise
977 st_mode = createmode
960 st_mode = createmode
978 if st_mode is None:
961 if st_mode is None:
979 st_mode = ~umask
962 st_mode = ~umask
980 st_mode &= 0666
963 st_mode &= 0666
981 os.chmod(temp, st_mode)
964 os.chmod(temp, st_mode)
982 if emptyok:
965 if emptyok:
983 return temp
966 return temp
984 try:
967 try:
985 try:
968 try:
986 ifp = posixfile(name, "rb")
969 ifp = posixfile(name, "rb")
987 except IOError, inst:
970 except IOError, inst:
988 if inst.errno == errno.ENOENT:
971 if inst.errno == errno.ENOENT:
989 return temp
972 return temp
990 if not getattr(inst, 'filename', None):
973 if not getattr(inst, 'filename', None):
991 inst.filename = name
974 inst.filename = name
992 raise
975 raise
993 ofp = posixfile(temp, "wb")
976 ofp = posixfile(temp, "wb")
994 for chunk in filechunkiter(ifp):
977 for chunk in filechunkiter(ifp):
995 ofp.write(chunk)
978 ofp.write(chunk)
996 ifp.close()
979 ifp.close()
997 ofp.close()
980 ofp.close()
998 except:
981 except:
999 try: os.unlink(temp)
982 try: os.unlink(temp)
1000 except: pass
983 except: pass
1001 raise
984 raise
1002 return temp
985 return temp
1003
986
1004 class atomictempfile(posixfile):
987 class atomictempfile(posixfile):
1005 """file-like object that atomically updates a file
988 """file-like object that atomically updates a file
1006
989
1007 All writes will be redirected to a temporary copy of the original
990 All writes will be redirected to a temporary copy of the original
1008 file. When rename is called, the copy is renamed to the original
991 file. When rename is called, the copy is renamed to the original
1009 name, making the changes visible.
992 name, making the changes visible.
1010 """
993 """
1011 def __init__(self, name, mode, createmode):
994 def __init__(self, name, mode, createmode):
1012 self.__name = name
995 self.__name = name
1013 self.temp = mktempcopy(name, emptyok=('w' in mode),
996 self.temp = mktempcopy(name, emptyok=('w' in mode),
1014 createmode=createmode)
997 createmode=createmode)
1015 posixfile.__init__(self, self.temp, mode)
998 posixfile.__init__(self, self.temp, mode)
1016
999
1017 def rename(self):
1000 def rename(self):
1018 if not self.closed:
1001 if not self.closed:
1019 posixfile.close(self)
1002 posixfile.close(self)
1020 rename(self.temp, localpath(self.__name))
1003 rename(self.temp, localpath(self.__name))
1021
1004
1022 def __del__(self):
1005 def __del__(self):
1023 if not self.closed:
1006 if not self.closed:
1024 try:
1007 try:
1025 os.unlink(self.temp)
1008 os.unlink(self.temp)
1026 except: pass
1009 except: pass
1027 posixfile.close(self)
1010 posixfile.close(self)
1028
1011
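As an illustration of the class above (a minimal sketch, assuming this module is importable as mercurial.util and that data.txt is a hypothetical file): writes land in a hidden temporary copy, and the change only becomes visible when rename() is called; if the object is dropped without rename(), the temporary file is unlinked.

from mercurial import util

f = util.atomictempfile('data.txt', 'wb', createmode=None)
f.write('new contents\n')   # written to a temporary .data.txt-* file alongside the original
f.rename()                  # closes the temp file and renames it over data.txt
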
1029 def makedirs(name, mode=None):
1012 def makedirs(name, mode=None):
1030 """recursive directory creation with parent mode inheritance"""
1013 """recursive directory creation with parent mode inheritance"""
1031 try:
1014 try:
1032 os.mkdir(name)
1015 os.mkdir(name)
1033 if mode is not None:
1016 if mode is not None:
1034 os.chmod(name, mode)
1017 os.chmod(name, mode)
1035 return
1018 return
1036 except OSError, err:
1019 except OSError, err:
1037 if err.errno == errno.EEXIST:
1020 if err.errno == errno.EEXIST:
1038 return
1021 return
1039 if err.errno != errno.ENOENT:
1022 if err.errno != errno.ENOENT:
1040 raise
1023 raise
1041 parent = os.path.abspath(os.path.dirname(name))
1024 parent = os.path.abspath(os.path.dirname(name))
1042 makedirs(parent, mode)
1025 makedirs(parent, mode)
1043 makedirs(name, mode)
1026 makedirs(name, mode)
1044
1027
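For illustration (the path is hypothetical), makedirs creates any missing parents first and applies the given mode to every directory it creates, leaving directories that already exist untouched:

from mercurial import util

util.makedirs('spool/incoming/new', mode=0755)   # creates spool, spool/incoming, spool/incoming/new as needed
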
1045 class opener(object):
1028 class opener(object):
1046 """Open files relative to a base directory
1029 """Open files relative to a base directory
1047
1030
1048 This class is used to hide the details of COW semantics and
1031 This class is used to hide the details of COW semantics and
1049 remote file access from higher level code.
1032 remote file access from higher level code.
1050 """
1033 """
1051 def __init__(self, base, audit=True):
1034 def __init__(self, base, audit=True):
1052 self.base = base
1035 self.base = base
1053 if audit:
1036 if audit:
1054 self.audit_path = path_auditor(base)
1037 self.audit_path = path_auditor(base)
1055 else:
1038 else:
1056 self.audit_path = always
1039 self.audit_path = always
1057 self.createmode = None
1040 self.createmode = None
1058
1041
1059 def __getattr__(self, name):
1042 def __getattr__(self, name):
1060 if name == '_can_symlink':
1043 if name == '_can_symlink':
1061 self._can_symlink = checklink(self.base)
1044 self._can_symlink = checklink(self.base)
1062 return self._can_symlink
1045 return self._can_symlink
1063 raise AttributeError(name)
1046 raise AttributeError(name)
1064
1047
1065 def _fixfilemode(self, name):
1048 def _fixfilemode(self, name):
1066 if self.createmode is None:
1049 if self.createmode is None:
1067 return
1050 return
1068 os.chmod(name, self.createmode & 0666)
1051 os.chmod(name, self.createmode & 0666)
1069
1052
1070 def __call__(self, path, mode="r", text=False, atomictemp=False):
1053 def __call__(self, path, mode="r", text=False, atomictemp=False):
1071 self.audit_path(path)
1054 self.audit_path(path)
1072 f = os.path.join(self.base, path)
1055 f = os.path.join(self.base, path)
1073
1056
1074 if not text and "b" not in mode:
1057 if not text and "b" not in mode:
1075 mode += "b" # for that other OS
1058 mode += "b" # for that other OS
1076
1059
1077 nlink = -1
1060 nlink = -1
1078 if mode not in ("r", "rb"):
1061 if mode not in ("r", "rb"):
1079 try:
1062 try:
1080 nlink = nlinks(f)
1063 nlink = nlinks(f)
1081 except OSError:
1064 except OSError:
1082 nlink = 0
1065 nlink = 0
1083 d = os.path.dirname(f)
1066 d = os.path.dirname(f)
1084 if not os.path.isdir(d):
1067 if not os.path.isdir(d):
1085 makedirs(d, self.createmode)
1068 makedirs(d, self.createmode)
1086 if atomictemp:
1069 if atomictemp:
1087 return atomictempfile(f, mode, self.createmode)
1070 return atomictempfile(f, mode, self.createmode)
1088 if nlink > 1:
1071 if nlink > 1:
1089 rename(mktempcopy(f), f)
1072 rename(mktempcopy(f), f)
1090 fp = posixfile(f, mode)
1073 fp = posixfile(f, mode)
1091 if nlink == 0:
1074 if nlink == 0:
1092 self._fixfilemode(f)
1075 self._fixfilemode(f)
1093 return fp
1076 return fp
1094
1077
1095 def symlink(self, src, dst):
1078 def symlink(self, src, dst):
1096 self.audit_path(dst)
1079 self.audit_path(dst)
1097 linkname = os.path.join(self.base, dst)
1080 linkname = os.path.join(self.base, dst)
1098 try:
1081 try:
1099 os.unlink(linkname)
1082 os.unlink(linkname)
1100 except OSError:
1083 except OSError:
1101 pass
1084 pass
1102
1085
1103 dirname = os.path.dirname(linkname)
1086 dirname = os.path.dirname(linkname)
1104 if not os.path.exists(dirname):
1087 if not os.path.exists(dirname):
1105 makedirs(dirname, self.createmode)
1088 makedirs(dirname, self.createmode)
1106
1089
1107 if self._can_symlink:
1090 if self._can_symlink:
1108 try:
1091 try:
1109 os.symlink(src, linkname)
1092 os.symlink(src, linkname)
1110 except OSError, err:
1093 except OSError, err:
1111 raise OSError(err.errno, _('could not symlink to %r: %s') %
1094 raise OSError(err.errno, _('could not symlink to %r: %s') %
1112 (src, err.strerror), linkname)
1095 (src, err.strerror), linkname)
1113 else:
1096 else:
1114 f = self(dst, "w")
1097 f = self(dst, "w")
1115 f.write(src)
1098 f.write(src)
1116 f.close()
1099 f.close()
1117 self._fixfilemode(dst)
1100 self._fixfilemode(dst)
1118
1101
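A usage sketch for the opener (the base directory and file name are hypothetical, and mercurial.util as the import path is an assumption): every path is joined to the base and audited, missing directories are created with createmode, and atomictemp=True hands back an atomictempfile for an atomic replace.

from mercurial import util

op = util.opener('/srv/repo/.hg')        # paths below are resolved relative to this base
op.createmode = 0644                     # mode propagated to newly created files and directories
f = op('store/fncache', 'w', atomictemp=True)
f.write('data/foo.i\n')
f.rename()                               # atomically replaces .hg/store/fncache
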
1119 class chunkbuffer(object):
1102 class chunkbuffer(object):
1120 """Allow arbitrary sized chunks of data to be efficiently read from an
1103 """Allow arbitrary sized chunks of data to be efficiently read from an
1121 iterator over chunks of arbitrary size."""
1104 iterator over chunks of arbitrary size."""
1122
1105
1123 def __init__(self, in_iter):
1106 def __init__(self, in_iter):
1124 """in_iter is the iterator that's iterating over the input chunks.
1107 """in_iter is the iterator that's iterating over the input chunks.
1125 reads are buffered internally in blocks of at least 2**16 bytes."""
1108 reads are buffered internally in blocks of at least 2**16 bytes."""
1126 self.iter = iter(in_iter)
1109 self.iter = iter(in_iter)
1127 self.buf = ''
1110 self.buf = ''
1128 self.targetsize = 2**16
1111 self.targetsize = 2**16
1129
1112
1130 def read(self, l):
1113 def read(self, l):
1131 """Read L bytes of data from the iterator of chunks of data.
1114 """Read L bytes of data from the iterator of chunks of data.
1132 Returns less than L bytes if the iterator runs dry."""
1115 Returns less than L bytes if the iterator runs dry."""
1133 if l > len(self.buf) and self.iter:
1116 if l > len(self.buf) and self.iter:
1134 # refill the buffer to at least max(l, self.targetsize) bytes
1117 # refill the buffer to at least max(l, self.targetsize) bytes
1135 targetsize = max(l, self.targetsize)
1118 targetsize = max(l, self.targetsize)
1136 collector = cStringIO.StringIO()
1119 collector = cStringIO.StringIO()
1137 collector.write(self.buf)
1120 collector.write(self.buf)
1138 collected = len(self.buf)
1121 collected = len(self.buf)
1139 for chunk in self.iter:
1122 for chunk in self.iter:
1140 collector.write(chunk)
1123 collector.write(chunk)
1141 collected += len(chunk)
1124 collected += len(chunk)
1142 if collected >= targetsize:
1125 if collected >= targetsize:
1143 break
1126 break
1144 if collected < targetsize:
1127 if collected < targetsize:
1145 self.iter = False
1128 self.iter = False
1146 self.buf = collector.getvalue()
1129 self.buf = collector.getvalue()
1147 if len(self.buf) == l:
1130 if len(self.buf) == l:
1148 s, self.buf = str(self.buf), ''
1131 s, self.buf = str(self.buf), ''
1149 else:
1132 else:
1150 s, self.buf = self.buf[:l], buffer(self.buf, l)
1133 s, self.buf = self.buf[:l], buffer(self.buf, l)
1151 return s
1134 return s
1152
1135
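An illustrative run (assuming the module is importable as mercurial.util): chunkbuffer turns an iterator over arbitrarily sized chunks into an object that supports fixed-size read() calls.

from mercurial import util

buf = util.chunkbuffer(iter(['abc', 'defgh', 'ij']))
print buf.read(4)     # 'abcd'
print buf.read(100)   # 'efghij' -- shorter than requested once the iterator runs dry
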
1153 def filechunkiter(f, size=65536, limit=None):
1136 def filechunkiter(f, size=65536, limit=None):
1154 """Create a generator that produces the data in the file size
1137 """Create a generator that produces the data in the file size
1155 (default 65536) bytes at a time, up to optional limit (default is
1138 (default 65536) bytes at a time, up to optional limit (default is
1156 to read all data). Chunks may be less than size bytes if the
1139 to read all data). Chunks may be less than size bytes if the
1157 chunk is the last chunk in the file, or the file is a socket or
1140 chunk is the last chunk in the file, or the file is a socket or
1158 some other type of file that sometimes reads less data than is
1141 some other type of file that sometimes reads less data than is
1159 requested."""
1142 requested."""
1160 assert size >= 0
1143 assert size >= 0
1161 assert limit is None or limit >= 0
1144 assert limit is None or limit >= 0
1162 while True:
1145 while True:
1163 if limit is None: nbytes = size
1146 if limit is None: nbytes = size
1164 else: nbytes = min(limit, size)
1147 else: nbytes = min(limit, size)
1165 s = nbytes and f.read(nbytes)
1148 s = nbytes and f.read(nbytes)
1166 if not s: break
1149 if not s: break
1167 if limit: limit -= len(s)
1150 if limit: limit -= len(s)
1168 yield s
1151 yield s
1169
1152
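For illustration (file names hypothetical), filechunkiter streams a file without reading it whole; here at most 1 MB is copied in 8 KB chunks:

from mercurial import util

src = open('big.bin', 'rb')
dst = open('copy.bin', 'wb')
for chunk in util.filechunkiter(src, size=8192, limit=1024 * 1024):
    dst.write(chunk)
src.close()
dst.close()
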
1170 def makedate():
1153 def makedate():
1171 lt = time.localtime()
1154 lt = time.localtime()
1172 if lt[8] == 1 and time.daylight:
1155 if lt[8] == 1 and time.daylight:
1173 tz = time.altzone
1156 tz = time.altzone
1174 else:
1157 else:
1175 tz = time.timezone
1158 tz = time.timezone
1176 return time.mktime(lt), tz
1159 return time.mktime(lt), tz
1177
1160
1178 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1161 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1179 """represent a (unixtime, offset) tuple as a localized time.
1162 """represent a (unixtime, offset) tuple as a localized time.
1180 unixtime is seconds since the epoch, and offset is the time zone's
1163 unixtime is seconds since the epoch, and offset is the time zone's
1181 number of seconds away from UTC. the offset is rendered wherever the
1164 number of seconds away from UTC. the offset is rendered wherever the
1182 format string contains "%1" (signed hours) and "%2" (minutes)."""
1165 format string contains "%1" (signed hours) and "%2" (minutes)."""
1183 t, tz = date or makedate()
1166 t, tz = date or makedate()
1184 if "%1" in format or "%2" in format:
1167 if "%1" in format or "%2" in format:
1185 sign = (tz > 0) and "-" or "+"
1168 sign = (tz > 0) and "-" or "+"
1186 minutes = abs(tz) / 60
1169 minutes = abs(tz) / 60
1187 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1170 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1188 format = format.replace("%2", "%02d" % (minutes % 60))
1171 format = format.replace("%2", "%02d" % (minutes % 60))
1189 s = time.strftime(format, time.gmtime(float(t) - tz))
1172 s = time.strftime(format, time.gmtime(float(t) - tz))
1190 return s
1173 return s
1191
1174
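A small sketch of how makedate() and datestr() fit together (outputs are examples only, and mercurial.util as the import path is an assumption): makedate() returns the current (unixtime, offset) pair, and datestr() formats it, substituting "%1" with the signed timezone hours and "%2" with the minutes.

from mercurial import util

now = util.makedate()                        # (unixtime, offset-from-UTC in seconds)
print util.datestr(now)                      # e.g. 'Sat Mar 07 23:00:00 2009 +0100'
print util.datestr(now, format='%Y-%m-%d %H:%M %1%2')
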
1192 def shortdate(date=None):
1175 def shortdate(date=None):
1193 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1176 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1194 return datestr(date, format='%Y-%m-%d')
1177 return datestr(date, format='%Y-%m-%d')
1195
1178
1196 def strdate(string, format, defaults=[]):
1179 def strdate(string, format, defaults=[]):
1197 """parse a localized time string and return a (unixtime, offset) tuple.
1180 """parse a localized time string and return a (unixtime, offset) tuple.
1198 if the string cannot be parsed, ValueError is raised."""
1181 if the string cannot be parsed, ValueError is raised."""
1199 def timezone(string):
1182 def timezone(string):
1200 tz = string.split()[-1]
1183 tz = string.split()[-1]
1201 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1184 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1202 sign = (tz[0] == "+") and 1 or -1
1185 sign = (tz[0] == "+") and 1 or -1
1203 hours = int(tz[1:3])
1186 hours = int(tz[1:3])
1204 minutes = int(tz[3:5])
1187 minutes = int(tz[3:5])
1205 return -sign * (hours * 60 + minutes) * 60
1188 return -sign * (hours * 60 + minutes) * 60
1206 if tz == "GMT" or tz == "UTC":
1189 if tz == "GMT" or tz == "UTC":
1207 return 0
1190 return 0
1208 return None
1191 return None
1209
1192
1210 # NOTE: unixtime = localunixtime + offset
1193 # NOTE: unixtime = localunixtime + offset
1211 offset, date = timezone(string), string
1194 offset, date = timezone(string), string
1212 if offset is not None:
1195 if offset is not None:
1213 date = " ".join(string.split()[:-1])
1196 date = " ".join(string.split()[:-1])
1214
1197
1215 # add missing elements from defaults
1198 # add missing elements from defaults
1216 for part in defaults:
1199 for part in defaults:
1217 found = [True for p in part if ("%"+p) in format]
1200 found = [True for p in part if ("%"+p) in format]
1218 if not found:
1201 if not found:
1219 date += "@" + defaults[part]
1202 date += "@" + defaults[part]
1220 format += "@%" + part[0]
1203 format += "@%" + part[0]
1221
1204
1222 timetuple = time.strptime(date, format)
1205 timetuple = time.strptime(date, format)
1223 localunixtime = int(calendar.timegm(timetuple))
1206 localunixtime = int(calendar.timegm(timetuple))
1224 if offset is None:
1207 if offset is None:
1225 # local timezone
1208 # local timezone
1226 unixtime = int(time.mktime(timetuple))
1209 unixtime = int(time.mktime(timetuple))
1227 offset = unixtime - localunixtime
1210 offset = unixtime - localunixtime
1228 else:
1211 else:
1229 unixtime = localunixtime + offset
1212 unixtime = localunixtime + offset
1230 return unixtime, offset
1213 return unixtime, offset
1231
1214
1232 def parsedate(date, formats=None, defaults=None):
1215 def parsedate(date, formats=None, defaults=None):
1233 """parse a localized date/time string and return a (unixtime, offset) tuple.
1216 """parse a localized date/time string and return a (unixtime, offset) tuple.
1234
1217
1235 The date may be a "unixtime offset" string or in one of the specified
1218 The date may be a "unixtime offset" string or in one of the specified
1236 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1219 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1237 """
1220 """
1238 if not date:
1221 if not date:
1239 return 0, 0
1222 return 0, 0
1240 if isinstance(date, tuple) and len(date) == 2:
1223 if isinstance(date, tuple) and len(date) == 2:
1241 return date
1224 return date
1242 if not formats:
1225 if not formats:
1243 formats = defaultdateformats
1226 formats = defaultdateformats
1244 date = date.strip()
1227 date = date.strip()
1245 try:
1228 try:
1246 when, offset = map(int, date.split(' '))
1229 when, offset = map(int, date.split(' '))
1247 except ValueError:
1230 except ValueError:
1248 # fill out defaults
1231 # fill out defaults
1249 if not defaults:
1232 if not defaults:
1250 defaults = {}
1233 defaults = {}
1251 now = makedate()
1234 now = makedate()
1252 for part in "d mb yY HI M S".split():
1235 for part in "d mb yY HI M S".split():
1253 if part not in defaults:
1236 if part not in defaults:
1254 if part[0] in "HMS":
1237 if part[0] in "HMS":
1255 defaults[part] = "00"
1238 defaults[part] = "00"
1256 else:
1239 else:
1257 defaults[part] = datestr(now, "%" + part[0])
1240 defaults[part] = datestr(now, "%" + part[0])
1258
1241
1259 for format in formats:
1242 for format in formats:
1260 try:
1243 try:
1261 when, offset = strdate(date, format, defaults)
1244 when, offset = strdate(date, format, defaults)
1262 except (ValueError, OverflowError):
1245 except (ValueError, OverflowError):
1263 pass
1246 pass
1264 else:
1247 else:
1265 break
1248 break
1266 else:
1249 else:
1267 raise Abort(_('invalid date: %r ') % date)
1250 raise Abort(_('invalid date: %r ') % date)
1268 # validate explicit (probably user-specified) date and
1251 # validate explicit (probably user-specified) date and
1269 # time zone offset. values must fit in signed 32 bits for
1252 # time zone offset. values must fit in signed 32 bits for
1270 # current 32-bit linux runtimes. timezones go from UTC-12
1253 # current 32-bit linux runtimes. timezones go from UTC-12
1271 # to UTC+14
1254 # to UTC+14
1272 if abs(when) > 0x7fffffff:
1255 if abs(when) > 0x7fffffff:
1273 raise Abort(_('date exceeds 32 bits: %d') % when)
1256 raise Abort(_('date exceeds 32 bits: %d') % when)
1274 if offset < -50400 or offset > 43200:
1257 if offset < -50400 or offset > 43200:
1275 raise Abort(_('impossible time zone offset: %d') % offset)
1258 raise Abort(_('impossible time zone offset: %d') % offset)
1276 return when, offset
1259 return when, offset
1277
1260
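Two illustrative calls (assuming mercurial.util as the import path): a raw "unixtime offset" string is passed through directly, while formatted strings are matched against defaultdateformats with missing fields filled from the defaults.

from mercurial import util

print util.parsedate('1165432709 25200')        # already (unixtime, offset): (1165432709, 25200)
print util.parsedate('2006-12-06 13:18 -0700')  # parsed against defaultdateformats
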
1278 def matchdate(date):
1261 def matchdate(date):
1279 """Return a function that matches a given date match specifier
1262 """Return a function that matches a given date match specifier
1280
1263
1281 Formats include:
1264 Formats include:
1282
1265
1283 '{date}' match a given date to the accuracy provided
1266 '{date}' match a given date to the accuracy provided
1284
1267
1285 '<{date}' on or before a given date
1268 '<{date}' on or before a given date
1286
1269
1287 '>{date}' on or after a given date
1270 '>{date}' on or after a given date
1288
1271
1289 """
1272 """
1290
1273
1291 def lower(date):
1274 def lower(date):
1292 d = dict(mb="1", d="1")
1275 d = dict(mb="1", d="1")
1293 return parsedate(date, extendeddateformats, d)[0]
1276 return parsedate(date, extendeddateformats, d)[0]
1294
1277
1295 def upper(date):
1278 def upper(date):
1296 d = dict(mb="12", HI="23", M="59", S="59")
1279 d = dict(mb="12", HI="23", M="59", S="59")
1297 for days in "31 30 29".split():
1280 for days in "31 30 29".split():
1298 try:
1281 try:
1299 d["d"] = days
1282 d["d"] = days
1300 return parsedate(date, extendeddateformats, d)[0]
1283 return parsedate(date, extendeddateformats, d)[0]
1301 except:
1284 except:
1302 pass
1285 pass
1303 d["d"] = "28"
1286 d["d"] = "28"
1304 return parsedate(date, extendeddateformats, d)[0]
1287 return parsedate(date, extendeddateformats, d)[0]
1305
1288
1306 date = date.strip()
1289 date = date.strip()
1307 if date[0] == "<":
1290 if date[0] == "<":
1308 when = upper(date[1:])
1291 when = upper(date[1:])
1309 return lambda x: x <= when
1292 return lambda x: x <= when
1310 elif date[0] == ">":
1293 elif date[0] == ">":
1311 when = lower(date[1:])
1294 when = lower(date[1:])
1312 return lambda x: x >= when
1295 return lambda x: x >= when
1313 elif date[0] == "-":
1296 elif date[0] == "-":
1314 try:
1297 try:
1315 days = int(date[1:])
1298 days = int(date[1:])
1316 except ValueError:
1299 except ValueError:
1317 raise Abort(_("invalid day spec: %s") % date[1:])
1300 raise Abort(_("invalid day spec: %s") % date[1:])
1318 when = makedate()[0] - days * 3600 * 24
1301 when = makedate()[0] - days * 3600 * 24
1319 return lambda x: x >= when
1302 return lambda x: x >= when
1320 elif " to " in date:
1303 elif " to " in date:
1321 a, b = date.split(" to ")
1304 a, b = date.split(" to ")
1322 start, stop = lower(a), upper(b)
1305 start, stop = lower(a), upper(b)
1323 return lambda x: x >= start and x <= stop
1306 return lambda x: x >= start and x <= stop
1324 else:
1307 else:
1325 start, stop = lower(date), upper(date)
1308 start, stop = lower(date), upper(date)
1326 return lambda x: x >= start and x <= stop
1309 return lambda x: x >= start and x <= stop
1327
1310
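A sketch of the matcher in use; the returned function is applied to unix timestamps such as the first element of a parsedate() result.

from mercurial import util

m = util.matchdate('>2008-01-01')
when = util.parsedate('2008-06-15')[0]
print m(when)    # True: the timestamp falls on or after 2008-01-01
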
1328 def shortuser(user):
1311 def shortuser(user):
1329 """Return a short representation of a user name or email address."""
1312 """Return a short representation of a user name or email address."""
1330 f = user.find('@')
1313 f = user.find('@')
1331 if f >= 0:
1314 if f >= 0:
1332 user = user[:f]
1315 user = user[:f]
1333 f = user.find('<')
1316 f = user.find('<')
1334 if f >= 0:
1317 if f >= 0:
1335 user = user[f+1:]
1318 user = user[f+1:]
1336 f = user.find(' ')
1319 f = user.find(' ')
1337 if f >= 0:
1320 if f >= 0:
1338 user = user[:f]
1321 user = user[:f]
1339 f = user.find('.')
1322 f = user.find('.')
1340 if f >= 0:
1323 if f >= 0:
1341 user = user[:f]
1324 user = user[:f]
1342 return user
1325 return user
1343
1326
1344 def email(author):
1327 def email(author):
1345 '''get email of author.'''
1328 '''get email of author.'''
1346 r = author.find('>')
1329 r = author.find('>')
1347 if r == -1: r = None
1330 if r == -1: r = None
1348 return author[author.find('<')+1:r]
1331 return author[author.find('<')+1:r]
1349
1332
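Both helpers extract a readable piece of a full author string; an illustrative run (the author string is hypothetical):

from mercurial import util

author = 'Jane Doe <jane.doe@example.com>'
print util.email(author)       # 'jane.doe@example.com'
print util.shortuser(author)   # 'jane'
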
1350 def ellipsis(text, maxlength=400):
1333 def ellipsis(text, maxlength=400):
1351 """Trim string to at most maxlength (default: 400) characters."""
1334 """Trim string to at most maxlength (default: 400) characters."""
1352 if len(text) <= maxlength:
1335 if len(text) <= maxlength:
1353 return text
1336 return text
1354 else:
1337 else:
1355 return "%s..." % (text[:maxlength-3])
1338 return "%s..." % (text[:maxlength-3])
1356
1339
1357 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1340 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1358 '''yield every hg repository under path, recursively.'''
1341 '''yield every hg repository under path, recursively.'''
1359 def errhandler(err):
1342 def errhandler(err):
1360 if err.filename == path:
1343 if err.filename == path:
1361 raise err
1344 raise err
1362 if followsym and hasattr(os.path, 'samestat'):
1345 if followsym and hasattr(os.path, 'samestat'):
1363 def _add_dir_if_not_there(dirlst, dirname):
1346 def _add_dir_if_not_there(dirlst, dirname):
1364 match = False
1347 match = False
1365 samestat = os.path.samestat
1348 samestat = os.path.samestat
1366 dirstat = os.stat(dirname)
1349 dirstat = os.stat(dirname)
1367 for lstdirstat in dirlst:
1350 for lstdirstat in dirlst:
1368 if samestat(dirstat, lstdirstat):
1351 if samestat(dirstat, lstdirstat):
1369 match = True
1352 match = True
1370 break
1353 break
1371 if not match:
1354 if not match:
1372 dirlst.append(dirstat)
1355 dirlst.append(dirstat)
1373 return not match
1356 return not match
1374 else:
1357 else:
1375 followsym = False
1358 followsym = False
1376
1359
1377 if (seen_dirs is None) and followsym:
1360 if (seen_dirs is None) and followsym:
1378 seen_dirs = []
1361 seen_dirs = []
1379 _add_dir_if_not_there(seen_dirs, path)
1362 _add_dir_if_not_there(seen_dirs, path)
1380 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1363 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1381 if '.hg' in dirs:
1364 if '.hg' in dirs:
1382 yield root # found a repository
1365 yield root # found a repository
1383 qroot = os.path.join(root, '.hg', 'patches')
1366 qroot = os.path.join(root, '.hg', 'patches')
1384 if os.path.isdir(os.path.join(qroot, '.hg')):
1367 if os.path.isdir(os.path.join(qroot, '.hg')):
1385 yield qroot # we have a patch queue repo here
1368 yield qroot # we have a patch queue repo here
1386 if recurse:
1369 if recurse:
1387 # avoid recursing inside the .hg directory
1370 # avoid recursing inside the .hg directory
1388 dirs.remove('.hg')
1371 dirs.remove('.hg')
1389 else:
1372 else:
1390 dirs[:] = [] # don't descend further
1373 dirs[:] = [] # don't descend further
1391 elif followsym:
1374 elif followsym:
1392 newdirs = []
1375 newdirs = []
1393 for d in dirs:
1376 for d in dirs:
1394 fname = os.path.join(root, d)
1377 fname = os.path.join(root, d)
1395 if _add_dir_if_not_there(seen_dirs, fname):
1378 if _add_dir_if_not_there(seen_dirs, fname):
1396 if os.path.islink(fname):
1379 if os.path.islink(fname):
1397 for hgname in walkrepos(fname, True, seen_dirs):
1380 for hgname in walkrepos(fname, True, seen_dirs):
1398 yield hgname
1381 yield hgname
1399 else:
1382 else:
1400 newdirs.append(d)
1383 newdirs.append(d)
1401 dirs[:] = newdirs
1384 dirs[:] = newdirs
1402
1385
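For illustration (the root directory is hypothetical), this walk reports every repository under a tree, including patch-queue repositories nested in .hg/patches, following symlinks where the platform supports samestat:

from mercurial import util

for repo in util.walkrepos('/srv/hg', followsym=True, recurse=True):
    print repo
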
1403 _rcpath = None
1386 _rcpath = None
1404
1387
1405 def os_rcpath():
1388 def os_rcpath():
1406 '''return default os-specific hgrc search path'''
1389 '''return default os-specific hgrc search path'''
1407 path = system_rcpath()
1390 path = system_rcpath()
1408 path.extend(user_rcpath())
1391 path.extend(user_rcpath())
1409 path = [os.path.normpath(f) for f in path]
1392 path = [os.path.normpath(f) for f in path]
1410 return path
1393 return path
1411
1394
1412 def rcpath():
1395 def rcpath():
1413 '''return hgrc search path. if env var HGRCPATH is set, use it.
1396 '''return hgrc search path. if env var HGRCPATH is set, use it.
1414 for each item in path, if directory, use files ending in .rc,
1397 for each item in path, if directory, use files ending in .rc,
1415 else use item.
1398 else use item.
1416 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1399 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1417 if no HGRCPATH, use default os-specific path.'''
1400 if no HGRCPATH, use default os-specific path.'''
1418 global _rcpath
1401 global _rcpath
1419 if _rcpath is None:
1402 if _rcpath is None:
1420 if 'HGRCPATH' in os.environ:
1403 if 'HGRCPATH' in os.environ:
1421 _rcpath = []
1404 _rcpath = []
1422 for p in os.environ['HGRCPATH'].split(os.pathsep):
1405 for p in os.environ['HGRCPATH'].split(os.pathsep):
1423 if not p: continue
1406 if not p: continue
1424 if os.path.isdir(p):
1407 if os.path.isdir(p):
1425 for f, kind in osutil.listdir(p):
1408 for f, kind in osutil.listdir(p):
1426 if f.endswith('.rc'):
1409 if f.endswith('.rc'):
1427 _rcpath.append(os.path.join(p, f))
1410 _rcpath.append(os.path.join(p, f))
1428 else:
1411 else:
1429 _rcpath.append(p)
1412 _rcpath.append(p)
1430 else:
1413 else:
1431 _rcpath = os_rcpath()
1414 _rcpath = os_rcpath()
1432 return _rcpath
1415 return _rcpath
1433
1416
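A sketch of the HGRCPATH handling (paths are hypothetical): directory entries contribute their *.rc files, plain entries are used verbatim, and the result is cached after the first call, so the variable must be set beforehand.

import os
from mercurial import util

os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/hgrc.d', '/home/user/.hgrc'])
print util.rcpath()   # e.g. ['/etc/mercurial/hgrc.d/paths.rc', '/home/user/.hgrc']
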
1434 def bytecount(nbytes):
1417 def bytecount(nbytes):
1435 '''return byte count formatted as readable string, with units'''
1418 '''return byte count formatted as readable string, with units'''
1436
1419
1437 units = (
1420 units = (
1438 (100, 1<<30, _('%.0f GB')),
1421 (100, 1<<30, _('%.0f GB')),
1439 (10, 1<<30, _('%.1f GB')),
1422 (10, 1<<30, _('%.1f GB')),
1440 (1, 1<<30, _('%.2f GB')),
1423 (1, 1<<30, _('%.2f GB')),
1441 (100, 1<<20, _('%.0f MB')),
1424 (100, 1<<20, _('%.0f MB')),
1442 (10, 1<<20, _('%.1f MB')),
1425 (10, 1<<20, _('%.1f MB')),
1443 (1, 1<<20, _('%.2f MB')),
1426 (1, 1<<20, _('%.2f MB')),
1444 (100, 1<<10, _('%.0f KB')),
1427 (100, 1<<10, _('%.0f KB')),
1445 (10, 1<<10, _('%.1f KB')),
1428 (10, 1<<10, _('%.1f KB')),
1446 (1, 1<<10, _('%.2f KB')),
1429 (1, 1<<10, _('%.2f KB')),
1447 (1, 1, _('%.0f bytes')),
1430 (1, 1, _('%.0f bytes')),
1448 )
1431 )
1449
1432
1450 for multiplier, divisor, format in units:
1433 for multiplier, divisor, format in units:
1451 if nbytes >= divisor * multiplier:
1434 if nbytes >= divisor * multiplier:
1452 return format % (nbytes / float(divisor))
1435 return format % (nbytes / float(divisor))
1453 return units[-1][2] % nbytes
1436 return units[-1][2] % nbytes
1454
1437
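A few illustrative values, showing how the displayed precision scales with magnitude:

from mercurial import util

print util.bytecount(512)             # '512 bytes'
print util.bytecount(13500)           # '13.2 KB'
print util.bytecount(1234567)         # '1.18 MB'
print util.bytecount(5 * (1 << 30))   # '5.00 GB'
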
1455 def drop_scheme(scheme, path):
1438 def drop_scheme(scheme, path):
1456 sc = scheme + ':'
1439 sc = scheme + ':'
1457 if path.startswith(sc):
1440 if path.startswith(sc):
1458 path = path[len(sc):]
1441 path = path[len(sc):]
1459 if path.startswith('//'):
1442 if path.startswith('//'):
1460 path = path[2:]
1443 path = path[2:]
1461 return path
1444 return path
1462
1445
1463 def uirepr(s):
1446 def uirepr(s):
1464 # Avoid double backslash in Windows path repr()
1447 # Avoid double backslash in Windows path repr()
1465 return repr(s).replace('\\\\', '\\')
1448 return repr(s).replace('\\\\', '\\')
1466
1449
1467 def termwidth():
1450 def termwidth():
1468 if 'COLUMNS' in os.environ:
1451 if 'COLUMNS' in os.environ:
1469 try:
1452 try:
1470 return int(os.environ['COLUMNS'])
1453 return int(os.environ['COLUMNS'])
1471 except ValueError:
1454 except ValueError:
1472 pass
1455 pass
1473 try:
1456 try:
1474 import termios, array, fcntl
1457 import termios, array, fcntl
1475 for dev in (sys.stdout, sys.stdin):
1458 for dev in (sys.stdout, sys.stdin):
1476 try:
1459 try:
1477 fd = dev.fileno()
1460 fd = dev.fileno()
1478 if not os.isatty(fd):
1461 if not os.isatty(fd):
1479 continue
1462 continue
1480 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1463 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1481 return array.array('h', arri)[1]
1464 return array.array('h', arri)[1]
1482 except ValueError:
1465 except ValueError:
1483 pass
1466 pass
1484 except ImportError:
1467 except ImportError:
1485 pass
1468 pass
1486 return 80
1469 return 80
1487
1470
1488 def iterlines(iterator):
1471 def iterlines(iterator):
1489 for chunk in iterator:
1472 for chunk in iterator:
1490 for line in chunk.splitlines():
1473 for line in chunk.splitlines():
1491 yield line
1474 yield line