Backed out changeset fce065538bcf: it caused a 5x performance regression on OS X
Bryan O'Sullivan - r9083:ec171737 default
@@ -1,1455 +1,1457 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2, incorporated herein by reference.
8 # GNU General Public License version 2, incorporated herein by reference.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil
17 import error, osutil
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
18 import cStringIO, errno, re, shutil, sys, tempfile, traceback
19 import os, stat, threading, time, calendar, glob, random
19 import os, stat, threading, time, calendar, glob, random
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 try:
31 try:
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 except ImportError:
33 except ImportError:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
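# Illustrative sketch (not part of util.py): the try/except import and the
# rebinding above only run on the first call; afterwards sha1 points straight
# at hashlib.sha1 (or sha.sha on old Pythons), so later calls skip the import
# machinery entirely.
def _sha1_sketch():
    first = sha1('abc').hexdigest()    # first call: imports and rebinds
    second = sha1('abc').hexdigest()   # later calls: plain hash function
    assert first == second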
39 import subprocess
39 import subprocess
40 closefds = os.name == 'posix'
40 closefds = os.name == 'posix'
41 - def popen2(cmd):
41 + def popen2(cmd, bufsize=-1):
42 - p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
42 + p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
43 + close_fds=closefds,
43 - stdin=subprocess.PIPE, stdout=subprocess.PIPE)
44 + stdin=subprocess.PIPE, stdout=subprocess.PIPE)
44 - return p.stdin, p.stdout
45 + return p.stdin, p.stdout
45 - def popen3(cmd):
46 + def popen3(cmd, bufsize=-1):
46 - p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
47 + p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
48 + close_fds=closefds,
47 - stdin=subprocess.PIPE, stdout=subprocess.PIPE,
49 + stdin=subprocess.PIPE, stdout=subprocess.PIPE,
48 - stderr=subprocess.PIPE)
50 + stderr=subprocess.PIPE)
49 - return p.stdin, p.stdout, p.stderr
51 + return p.stdin, p.stdout, p.stderr
50
52
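# Illustrative sketch (not part of util.py): popen2/popen3 wrap
# subprocess.Popen with shell=True and return the pipe file objects; the
# bufsize argument added on the new side of this hunk is passed straight
# through to Popen.  Assumes a POSIX 'tr' binary on PATH.
def _popen2_sketch():
    fin, fout = popen2('tr a-z A-Z', bufsize=4096)
    fin.write('hello\n')
    fin.close()
    assert fout.read() == 'HELLO\n'
    fout.close()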
51 def version():
53 def version():
52 """Return version information if available."""
54 """Return version information if available."""
53 try:
55 try:
54 import __version__
56 import __version__
55 return __version__.version
57 return __version__.version
56 except ImportError:
58 except ImportError:
57 return 'unknown'
59 return 'unknown'
58
60
59 # used by parsedate
61 # used by parsedate
60 defaultdateformats = (
62 defaultdateformats = (
61 '%Y-%m-%d %H:%M:%S',
63 '%Y-%m-%d %H:%M:%S',
62 '%Y-%m-%d %I:%M:%S%p',
64 '%Y-%m-%d %I:%M:%S%p',
63 '%Y-%m-%d %H:%M',
65 '%Y-%m-%d %H:%M',
64 '%Y-%m-%d %I:%M%p',
66 '%Y-%m-%d %I:%M%p',
65 '%Y-%m-%d',
67 '%Y-%m-%d',
66 '%m-%d',
68 '%m-%d',
67 '%m/%d',
69 '%m/%d',
68 '%m/%d/%y',
70 '%m/%d/%y',
69 '%m/%d/%Y',
71 '%m/%d/%Y',
70 '%a %b %d %H:%M:%S %Y',
72 '%a %b %d %H:%M:%S %Y',
71 '%a %b %d %I:%M:%S%p %Y',
73 '%a %b %d %I:%M:%S%p %Y',
72 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
74 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
73 '%b %d %H:%M:%S %Y',
75 '%b %d %H:%M:%S %Y',
74 '%b %d %I:%M:%S%p %Y',
76 '%b %d %I:%M:%S%p %Y',
75 '%b %d %H:%M:%S',
77 '%b %d %H:%M:%S',
76 '%b %d %I:%M:%S%p',
78 '%b %d %I:%M:%S%p',
77 '%b %d %H:%M',
79 '%b %d %H:%M',
78 '%b %d %I:%M%p',
80 '%b %d %I:%M%p',
79 '%b %d %Y',
81 '%b %d %Y',
80 '%b %d',
82 '%b %d',
81 '%H:%M:%S',
83 '%H:%M:%S',
82 '%I:%M:%SP',
84 '%I:%M:%SP',
83 '%H:%M',
85 '%H:%M',
84 '%I:%M%p',
86 '%I:%M%p',
85 )
87 )
86
88
87 extendeddateformats = defaultdateformats + (
89 extendeddateformats = defaultdateformats + (
88 "%Y",
90 "%Y",
89 "%Y-%m",
91 "%Y-%m",
90 "%b",
92 "%b",
91 "%b %Y",
93 "%b %Y",
92 )
94 )
93
95
94 def cachefunc(func):
96 def cachefunc(func):
95 '''cache the result of function calls'''
97 '''cache the result of function calls'''
96 # XXX doesn't handle keyword args
98 # XXX doesn't handle keyword args
97 cache = {}
99 cache = {}
98 if func.func_code.co_argcount == 1:
100 if func.func_code.co_argcount == 1:
99 # we gain a small amount of time because
101 # we gain a small amount of time because
100 # we don't need to pack/unpack the list
102 # we don't need to pack/unpack the list
101 def f(arg):
103 def f(arg):
102 if arg not in cache:
104 if arg not in cache:
103 cache[arg] = func(arg)
105 cache[arg] = func(arg)
104 return cache[arg]
106 return cache[arg]
105 else:
107 else:
106 def f(*args):
108 def f(*args):
107 if args not in cache:
109 if args not in cache:
108 cache[args] = func(*args)
110 cache[args] = func(*args)
109 return cache[args]
111 return cache[args]
110
112
111 return f
113 return f
112
114
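# Illustrative sketch (not part of util.py): cachefunc memoizes on positional
# arguments only (the XXX above notes that keyword args are not handled), so
# a repeated call is served from the cache dict.
def _cachefunc_sketch():
    calls = []
    def expensive(x):
        calls.append(x)
        return x * 2
    cached = cachefunc(expensive)
    assert cached(3) == 6
    assert cached(3) == 6
    assert calls == [3]               # the wrapped function ran only once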
113 class propertycache(object):
115 class propertycache(object):
114 def __init__(self, func):
116 def __init__(self, func):
115 self.func = func
117 self.func = func
116 self.name = func.__name__
118 self.name = func.__name__
117 def __get__(self, obj, type=None):
119 def __get__(self, obj, type=None):
118 result = self.func(obj)
120 result = self.func(obj)
119 setattr(obj, self.name, result)
121 setattr(obj, self.name, result)
120 return result
122 return result
121
123
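# Illustrative sketch (not part of util.py): propertycache computes the value
# on first attribute access, then setattr() in __get__ replaces the
# descriptor with a plain instance attribute, so later accesses are free.
class _propertycache_sketch(object):
    @propertycache
    def answer(self):
        return 6 * 7                  # runs at most once per instance
# _propertycache_sketch().answer == 42; afterwards 'answer' lives in the
# instance __dict__ and the descriptor is no longer consulted.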
122 def pipefilter(s, cmd):
124 def pipefilter(s, cmd):
123 '''filter string S through command CMD, returning its output'''
125 '''filter string S through command CMD, returning its output'''
124 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
126 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
125 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
127 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
126 pout, perr = p.communicate(s)
128 pout, perr = p.communicate(s)
127 return pout
129 return pout
128
130
129 def tempfilter(s, cmd):
131 def tempfilter(s, cmd):
130 '''filter string S through a pair of temporary files with CMD.
132 '''filter string S through a pair of temporary files with CMD.
131 CMD is used as a template to create the real command to be run,
133 CMD is used as a template to create the real command to be run,
132 with the strings INFILE and OUTFILE replaced by the real names of
134 with the strings INFILE and OUTFILE replaced by the real names of
133 the temporary files generated.'''
135 the temporary files generated.'''
134 inname, outname = None, None
136 inname, outname = None, None
135 try:
137 try:
136 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
138 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
137 fp = os.fdopen(infd, 'wb')
139 fp = os.fdopen(infd, 'wb')
138 fp.write(s)
140 fp.write(s)
139 fp.close()
141 fp.close()
140 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
142 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
141 os.close(outfd)
143 os.close(outfd)
142 cmd = cmd.replace('INFILE', inname)
144 cmd = cmd.replace('INFILE', inname)
143 cmd = cmd.replace('OUTFILE', outname)
145 cmd = cmd.replace('OUTFILE', outname)
144 code = os.system(cmd)
146 code = os.system(cmd)
145 if sys.platform == 'OpenVMS' and code & 1:
147 if sys.platform == 'OpenVMS' and code & 1:
146 code = 0
148 code = 0
147 if code: raise Abort(_("command '%s' failed: %s") %
149 if code: raise Abort(_("command '%s' failed: %s") %
148 (cmd, explain_exit(code)))
150 (cmd, explain_exit(code)))
149 return open(outname, 'rb').read()
151 return open(outname, 'rb').read()
150 finally:
152 finally:
151 try:
153 try:
152 if inname: os.unlink(inname)
154 if inname: os.unlink(inname)
153 except: pass
155 except: pass
154 try:
156 try:
155 if outname: os.unlink(outname)
157 if outname: os.unlink(outname)
156 except: pass
158 except: pass
157
159
158 filtertable = {
160 filtertable = {
159 'tempfile:': tempfilter,
161 'tempfile:': tempfilter,
160 'pipe:': pipefilter,
162 'pipe:': pipefilter,
161 }
163 }
162
164
163 def filter(s, cmd):
165 def filter(s, cmd):
164 "filter a string through a command that transforms its input to its output"
166 "filter a string through a command that transforms its input to its output"
165 for name, fn in filtertable.iteritems():
167 for name, fn in filtertable.iteritems():
166 if cmd.startswith(name):
168 if cmd.startswith(name):
167 return fn(s, cmd[len(name):].lstrip())
169 return fn(s, cmd[len(name):].lstrip())
168 return pipefilter(s, cmd)
170 return pipefilter(s, cmd)
169
171
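# Illustrative sketch (not part of util.py): filter() picks a strategy from
# filtertable by prefix and falls back to a pipe.  The commands below are
# hypothetical and assume a POSIX shell with tr and sed available.
def _filter_sketch():
    upper = filter('abc\n', 'pipe: tr a-z A-Z')                      # pipefilter
    copy = filter('abc\n', 'tempfile: sed -e "" INFILE > OUTFILE')   # tempfilter
    return upper, copy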
170 def binary(s):
172 def binary(s):
171 """return true if a string is binary data"""
173 """return true if a string is binary data"""
172 return bool(s and '\0' in s)
174 return bool(s and '\0' in s)
173
175
174 def increasingchunks(source, min=1024, max=65536):
176 def increasingchunks(source, min=1024, max=65536):
175 '''return no less than min bytes per chunk while data remains,
177 '''return no less than min bytes per chunk while data remains,
176 doubling min after each chunk until it reaches max'''
178 doubling min after each chunk until it reaches max'''
177 def log2(x):
179 def log2(x):
178 if not x:
180 if not x:
179 return 0
181 return 0
180 i = 0
182 i = 0
181 while x:
183 while x:
182 x >>= 1
184 x >>= 1
183 i += 1
185 i += 1
184 return i - 1
186 return i - 1
185
187
186 buf = []
188 buf = []
187 blen = 0
189 blen = 0
188 for chunk in source:
190 for chunk in source:
189 buf.append(chunk)
191 buf.append(chunk)
190 blen += len(chunk)
192 blen += len(chunk)
191 if blen >= min:
193 if blen >= min:
192 if min < max:
194 if min < max:
193 min = min << 1
195 min = min << 1
194 nmin = 1 << log2(blen)
196 nmin = 1 << log2(blen)
195 if nmin > min:
197 if nmin > min:
196 min = nmin
198 min = nmin
197 if min > max:
199 if min > max:
198 min = max
200 min = max
199 yield ''.join(buf)
201 yield ''.join(buf)
200 blen = 0
202 blen = 0
201 buf = []
203 buf = []
202 if buf:
204 if buf:
203 yield ''.join(buf)
205 yield ''.join(buf)
204
206
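# Illustrative sketch (not part of util.py): increasingchunks rebatches many
# small chunks into progressively larger ones, starting near min and growing
# toward max, which keeps per-chunk overhead low for large streams.
def _increasingchunks_sketch():
    source = ('x' * 100 for _ in xrange(100))      # 100 chunks of 100 bytes
    sizes = [len(c) for c in increasingchunks(source, min=1024, max=4096)]
    assert sum(sizes) == 10000
    assert sizes[0] >= 1024                        # first batch is >= min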
205 Abort = error.Abort
207 Abort = error.Abort
206
208
207 def always(fn): return True
209 def always(fn): return True
208 def never(fn): return False
210 def never(fn): return False
209
211
210 def patkind(name, default):
212 def patkind(name, default):
211 """Split a string into an optional pattern kind prefix and the
213 """Split a string into an optional pattern kind prefix and the
212 actual pattern."""
214 actual pattern."""
213 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
215 for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
214 if name.startswith(prefix + ':'): return name.split(':', 1)
216 if name.startswith(prefix + ':'): return name.split(':', 1)
215 return default, name
217 return default, name
216
218
217 def globre(pat, head='^', tail='$'):
219 def globre(pat, head='^', tail='$'):
218 "convert a glob pattern into a regexp"
220 "convert a glob pattern into a regexp"
219 i, n = 0, len(pat)
221 i, n = 0, len(pat)
220 res = ''
222 res = ''
221 group = 0
223 group = 0
222 def peek(): return i < n and pat[i]
224 def peek(): return i < n and pat[i]
223 while i < n:
225 while i < n:
224 c = pat[i]
226 c = pat[i]
225 i = i+1
227 i = i+1
226 if c == '*':
228 if c == '*':
227 if peek() == '*':
229 if peek() == '*':
228 i += 1
230 i += 1
229 res += '.*'
231 res += '.*'
230 else:
232 else:
231 res += '[^/]*'
233 res += '[^/]*'
232 elif c == '?':
234 elif c == '?':
233 res += '.'
235 res += '.'
234 elif c == '[':
236 elif c == '[':
235 j = i
237 j = i
236 if j < n and pat[j] in '!]':
238 if j < n and pat[j] in '!]':
237 j += 1
239 j += 1
238 while j < n and pat[j] != ']':
240 while j < n and pat[j] != ']':
239 j += 1
241 j += 1
240 if j >= n:
242 if j >= n:
241 res += '\\['
243 res += '\\['
242 else:
244 else:
243 stuff = pat[i:j].replace('\\','\\\\')
245 stuff = pat[i:j].replace('\\','\\\\')
244 i = j + 1
246 i = j + 1
245 if stuff[0] == '!':
247 if stuff[0] == '!':
246 stuff = '^' + stuff[1:]
248 stuff = '^' + stuff[1:]
247 elif stuff[0] == '^':
249 elif stuff[0] == '^':
248 stuff = '\\' + stuff
250 stuff = '\\' + stuff
249 res = '%s[%s]' % (res, stuff)
251 res = '%s[%s]' % (res, stuff)
250 elif c == '{':
252 elif c == '{':
251 group += 1
253 group += 1
252 res += '(?:'
254 res += '(?:'
253 elif c == '}' and group:
255 elif c == '}' and group:
254 res += ')'
256 res += ')'
255 group -= 1
257 group -= 1
256 elif c == ',' and group:
258 elif c == ',' and group:
257 res += '|'
259 res += '|'
258 elif c == '\\':
260 elif c == '\\':
259 p = peek()
261 p = peek()
260 if p:
262 if p:
261 i += 1
263 i += 1
262 res += re.escape(p)
264 res += re.escape(p)
263 else:
265 else:
264 res += re.escape(c)
266 res += re.escape(c)
265 else:
267 else:
266 res += re.escape(c)
268 res += re.escape(c)
267 return head + res + tail
269 return head + res + tail
268
270
269 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
271 _globchars = {'[': 1, '{': 1, '*': 1, '?': 1}
270
272
271 def pathto(root, n1, n2):
273 def pathto(root, n1, n2):
272 '''return the relative path from one place to another.
274 '''return the relative path from one place to another.
273 root should use os.sep to separate directories
275 root should use os.sep to separate directories
274 n1 should use os.sep to separate directories
276 n1 should use os.sep to separate directories
275 n2 should use "/" to separate directories
277 n2 should use "/" to separate directories
276 returns an os.sep-separated path.
278 returns an os.sep-separated path.
277
279
278 If n1 is a relative path, it's assumed it's
280 If n1 is a relative path, it's assumed it's
279 relative to root.
281 relative to root.
280 n2 should always be relative to root.
282 n2 should always be relative to root.
281 '''
283 '''
282 if not n1: return localpath(n2)
284 if not n1: return localpath(n2)
283 if os.path.isabs(n1):
285 if os.path.isabs(n1):
284 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
285 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
286 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
287 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
288 a.reverse()
290 a.reverse()
289 b.reverse()
291 b.reverse()
290 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
291 a.pop()
293 a.pop()
292 b.pop()
294 b.pop()
293 b.reverse()
295 b.reverse()
294 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
295
297
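# Illustrative sketch (not part of util.py): pathto answers "how do I get
# from n1 to n2?" with an os.sep-separated relative path.  Hypothetical
# POSIX example with an absolute n1 under root:
def _pathto_sketch():
    rel = pathto('/repo', '/repo/src/lib', 'doc/README')
    assert rel == '../../doc/README'   # up out of src/lib, then into doc/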
296 def canonpath(root, cwd, myname):
298 def canonpath(root, cwd, myname):
297 """return the canonical path of myname, given cwd and root"""
299 """return the canonical path of myname, given cwd and root"""
298 if root == os.sep:
300 if root == os.sep:
299 rootsep = os.sep
301 rootsep = os.sep
300 elif endswithsep(root):
302 elif endswithsep(root):
301 rootsep = root
303 rootsep = root
302 else:
304 else:
303 rootsep = root + os.sep
305 rootsep = root + os.sep
304 name = myname
306 name = myname
305 if not os.path.isabs(name):
307 if not os.path.isabs(name):
306 name = os.path.join(root, cwd, name)
308 name = os.path.join(root, cwd, name)
307 name = os.path.normpath(name)
309 name = os.path.normpath(name)
308 audit_path = path_auditor(root)
310 audit_path = path_auditor(root)
309 if name != rootsep and name.startswith(rootsep):
311 if name != rootsep and name.startswith(rootsep):
310 name = name[len(rootsep):]
312 name = name[len(rootsep):]
311 audit_path(name)
313 audit_path(name)
312 return pconvert(name)
314 return pconvert(name)
313 elif name == root:
315 elif name == root:
314 return ''
316 return ''
315 else:
317 else:
316 # Determine whether `name' is in the hierarchy at or beneath `root',
318 # Determine whether `name' is in the hierarchy at or beneath `root',
317 # by iterating name=dirname(name) until that causes no change (can't
319 # by iterating name=dirname(name) until that causes no change (can't
318 # check name == '/', because that doesn't work on windows). For each
320 # check name == '/', because that doesn't work on windows). For each
319 # `name', compare dev/inode numbers. If they match, the list `rel'
321 # `name', compare dev/inode numbers. If they match, the list `rel'
320 # holds the reversed list of components making up the relative file
322 # holds the reversed list of components making up the relative file
321 # name we want.
323 # name we want.
322 root_st = os.stat(root)
324 root_st = os.stat(root)
323 rel = []
325 rel = []
324 while True:
326 while True:
325 try:
327 try:
326 name_st = os.stat(name)
328 name_st = os.stat(name)
327 except OSError:
329 except OSError:
328 break
330 break
329 if samestat(name_st, root_st):
331 if samestat(name_st, root_st):
330 if not rel:
332 if not rel:
331 # name was actually the same as root (maybe a symlink)
333 # name was actually the same as root (maybe a symlink)
332 return ''
334 return ''
333 rel.reverse()
335 rel.reverse()
334 name = os.path.join(*rel)
336 name = os.path.join(*rel)
335 audit_path(name)
337 audit_path(name)
336 return pconvert(name)
338 return pconvert(name)
337 dirname, basename = os.path.split(name)
339 dirname, basename = os.path.split(name)
338 rel.append(basename)
340 rel.append(basename)
339 if dirname == name:
341 if dirname == name:
340 break
342 break
341 name = dirname
343 name = dirname
342
344
343 raise Abort('%s not under root' % myname)
345 raise Abort('%s not under root' % myname)
344
346
345 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
347 def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
346 """build a function to match a set of file patterns
348 """build a function to match a set of file patterns
347
349
348 arguments:
350 arguments:
349 canonroot - the canonical root of the tree you're matching against
351 canonroot - the canonical root of the tree you're matching against
350 cwd - the current working directory, if relevant
352 cwd - the current working directory, if relevant
351 names - patterns to find
353 names - patterns to find
352 inc - patterns to include
354 inc - patterns to include
353 exc - patterns to exclude
355 exc - patterns to exclude
354 dflt_pat - if a pattern in names has no explicit type, assume this one
356 dflt_pat - if a pattern in names has no explicit type, assume this one
355 src - where these patterns came from (e.g. .hgignore)
357 src - where these patterns came from (e.g. .hgignore)
356
358
357 a pattern is one of:
359 a pattern is one of:
358 'glob:<glob>' - a glob relative to cwd
360 'glob:<glob>' - a glob relative to cwd
359 're:<regexp>' - a regular expression
361 're:<regexp>' - a regular expression
360 'path:<path>' - a path relative to canonroot
362 'path:<path>' - a path relative to canonroot
361 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
363 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
362 'relpath:<path>' - a path relative to cwd
364 'relpath:<path>' - a path relative to cwd
363 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
365 'relre:<regexp>' - a regexp that doesn't have to match the start of a name
364 '<something>' - one of the cases above, selected by the dflt_pat argument
366 '<something>' - one of the cases above, selected by the dflt_pat argument
365
367
366 returns:
368 returns:
367 a 3-tuple containing
369 a 3-tuple containing
368 - list of roots (places where one should start a recursive walk of the fs);
370 - list of roots (places where one should start a recursive walk of the fs);
369 this often matches the explicit non-pattern names passed in, but also
371 this often matches the explicit non-pattern names passed in, but also
370 includes the initial part of glob: patterns that has no glob characters
372 includes the initial part of glob: patterns that has no glob characters
371 - a bool match(filename) function
373 - a bool match(filename) function
372 - a bool indicating if any patterns were passed in
374 - a bool indicating if any patterns were passed in
373 """
375 """
374
376
375 # a common case: no patterns at all
377 # a common case: no patterns at all
376 if not names and not inc and not exc:
378 if not names and not inc and not exc:
377 return [], always, False
379 return [], always, False
378
380
379 def contains_glob(name):
381 def contains_glob(name):
380 for c in name:
382 for c in name:
381 if c in _globchars: return True
383 if c in _globchars: return True
382 return False
384 return False
383
385
384 def regex(kind, name, tail):
386 def regex(kind, name, tail):
385 '''convert a pattern into a regular expression'''
387 '''convert a pattern into a regular expression'''
386 if not name:
388 if not name:
387 return ''
389 return ''
388 if kind == 're':
390 if kind == 're':
389 return name
391 return name
390 elif kind == 'path':
392 elif kind == 'path':
391 return '^' + re.escape(name) + '(?:/|$)'
393 return '^' + re.escape(name) + '(?:/|$)'
392 elif kind == 'relglob':
394 elif kind == 'relglob':
393 return globre(name, '(?:|.*/)', tail)
395 return globre(name, '(?:|.*/)', tail)
394 elif kind == 'relpath':
396 elif kind == 'relpath':
395 return re.escape(name) + '(?:/|$)'
397 return re.escape(name) + '(?:/|$)'
396 elif kind == 'relre':
398 elif kind == 'relre':
397 if name.startswith('^'):
399 if name.startswith('^'):
398 return name
400 return name
399 return '.*' + name
401 return '.*' + name
400 return globre(name, '', tail)
402 return globre(name, '', tail)
401
403
402 def matchfn(pats, tail):
404 def matchfn(pats, tail):
403 """build a matching function from a set of patterns"""
405 """build a matching function from a set of patterns"""
404 if not pats:
406 if not pats:
405 return
407 return
406 try:
408 try:
407 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
409 pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
408 if len(pat) > 20000:
410 if len(pat) > 20000:
409 raise OverflowError()
411 raise OverflowError()
410 return re.compile(pat).match
412 return re.compile(pat).match
411 except OverflowError:
413 except OverflowError:
412 # We're using a Python with a tiny regex engine and we
414 # We're using a Python with a tiny regex engine and we
413 # made it explode, so we'll divide the pattern list in two
415 # made it explode, so we'll divide the pattern list in two
414 # until it works
416 # until it works
415 l = len(pats)
417 l = len(pats)
416 if l < 2:
418 if l < 2:
417 raise
419 raise
418 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
420 a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
419 return lambda s: a(s) or b(s)
421 return lambda s: a(s) or b(s)
420 except re.error:
422 except re.error:
421 for k, p in pats:
423 for k, p in pats:
422 try:
424 try:
423 re.compile('(?:%s)' % regex(k, p, tail))
425 re.compile('(?:%s)' % regex(k, p, tail))
424 except re.error:
426 except re.error:
425 if src:
427 if src:
426 raise Abort("%s: invalid pattern (%s): %s" %
428 raise Abort("%s: invalid pattern (%s): %s" %
427 (src, k, p))
429 (src, k, p))
428 else:
430 else:
429 raise Abort("invalid pattern (%s): %s" % (k, p))
431 raise Abort("invalid pattern (%s): %s" % (k, p))
430 raise Abort("invalid pattern")
432 raise Abort("invalid pattern")
431
433
432 def globprefix(pat):
434 def globprefix(pat):
433 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
435 '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
434 root = []
436 root = []
435 for p in pat.split('/'):
437 for p in pat.split('/'):
436 if contains_glob(p): break
438 if contains_glob(p): break
437 root.append(p)
439 root.append(p)
438 return '/'.join(root) or '.'
440 return '/'.join(root) or '.'
439
441
440 def normalizepats(names, default):
442 def normalizepats(names, default):
441 pats = []
443 pats = []
442 roots = []
444 roots = []
443 anypats = False
445 anypats = False
444 for kind, name in [patkind(p, default) for p in names]:
446 for kind, name in [patkind(p, default) for p in names]:
445 if kind in ('glob', 'relpath'):
447 if kind in ('glob', 'relpath'):
446 name = canonpath(canonroot, cwd, name)
448 name = canonpath(canonroot, cwd, name)
447 elif kind in ('relglob', 'path'):
449 elif kind in ('relglob', 'path'):
448 name = normpath(name)
450 name = normpath(name)
449
451
450 pats.append((kind, name))
452 pats.append((kind, name))
451
453
452 if kind in ('glob', 're', 'relglob', 'relre'):
454 if kind in ('glob', 're', 'relglob', 'relre'):
453 anypats = True
455 anypats = True
454
456
455 if kind == 'glob':
457 if kind == 'glob':
456 root = globprefix(name)
458 root = globprefix(name)
457 roots.append(root)
459 roots.append(root)
458 elif kind in ('relpath', 'path'):
460 elif kind in ('relpath', 'path'):
459 roots.append(name or '.')
461 roots.append(name or '.')
460 elif kind == 'relglob':
462 elif kind == 'relglob':
461 roots.append('.')
463 roots.append('.')
462 return roots, pats, anypats
464 return roots, pats, anypats
463
465
464 roots, pats, anypats = normalizepats(names, dflt_pat)
466 roots, pats, anypats = normalizepats(names, dflt_pat)
465
467
466 patmatch = matchfn(pats, '$') or always
468 patmatch = matchfn(pats, '$') or always
467 incmatch = always
469 incmatch = always
468 if inc:
470 if inc:
469 dummy, inckinds, dummy = normalizepats(inc, 'glob')
471 dummy, inckinds, dummy = normalizepats(inc, 'glob')
470 incmatch = matchfn(inckinds, '(?:/|$)')
472 incmatch = matchfn(inckinds, '(?:/|$)')
471 excmatch = never
473 excmatch = never
472 if exc:
474 if exc:
473 dummy, exckinds, dummy = normalizepats(exc, 'glob')
475 dummy, exckinds, dummy = normalizepats(exc, 'glob')
474 excmatch = matchfn(exckinds, '(?:/|$)')
476 excmatch = matchfn(exckinds, '(?:/|$)')
475
477
476 if not names and inc and not exc:
478 if not names and inc and not exc:
477 # common case: hgignore patterns
479 # common case: hgignore patterns
478 match = incmatch
480 match = incmatch
479 else:
481 else:
480 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
482 match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)
481
483
482 return (roots, match, (inc or exc or anypats) and True)
484 return (roots, match, (inc or exc or anypats) and True)
483
485
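# Illustrative sketch (not part of util.py): matcher() compiles pattern
# strings into (roots, match, anypats).  A hypothetical call:
def _matcher_sketch():
    roots, match, anypats = matcher('/repo', names=['glob:src/**.py'],
                                    exc=['relglob:*.orig'])
    # roots == ['src'] (the glob-free prefix), anypats is True,
    # match('src/a/b.py') is truthy, match('src/a/b.py.orig') is not.
    return roots, match, anypats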
484 _hgexecutable = None
486 _hgexecutable = None
485
487
486 def main_is_frozen():
488 def main_is_frozen():
487 """return True if we are a frozen executable.
489 """return True if we are a frozen executable.
488
490
489 The code supports py2exe (most common, Windows only) and tools/freeze
491 The code supports py2exe (most common, Windows only) and tools/freeze
490 (portable, not much used).
492 (portable, not much used).
491 """
493 """
492 return (hasattr(sys, "frozen") or # new py2exe
494 return (hasattr(sys, "frozen") or # new py2exe
493 hasattr(sys, "importers") or # old py2exe
495 hasattr(sys, "importers") or # old py2exe
494 imp.is_frozen("__main__")) # tools/freeze
496 imp.is_frozen("__main__")) # tools/freeze
495
497
496 def hgexecutable():
498 def hgexecutable():
497 """return location of the 'hg' executable.
499 """return location of the 'hg' executable.
498
500
499 Defaults to $HG or 'hg' in the search path.
501 Defaults to $HG or 'hg' in the search path.
500 """
502 """
501 if _hgexecutable is None:
503 if _hgexecutable is None:
502 hg = os.environ.get('HG')
504 hg = os.environ.get('HG')
503 if hg:
505 if hg:
504 set_hgexecutable(hg)
506 set_hgexecutable(hg)
505 elif main_is_frozen():
507 elif main_is_frozen():
506 set_hgexecutable(sys.executable)
508 set_hgexecutable(sys.executable)
507 else:
509 else:
508 set_hgexecutable(find_exe('hg') or 'hg')
510 set_hgexecutable(find_exe('hg') or 'hg')
509 return _hgexecutable
511 return _hgexecutable
510
512
511 def set_hgexecutable(path):
513 def set_hgexecutable(path):
512 """set location of the 'hg' executable"""
514 """set location of the 'hg' executable"""
513 global _hgexecutable
515 global _hgexecutable
514 _hgexecutable = path
516 _hgexecutable = path
515
517
516 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
518 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
517 '''enhanced shell command execution.
519 '''enhanced shell command execution.
518 run with environment maybe modified, maybe in different dir.
520 run with environment maybe modified, maybe in different dir.
519
521
520 if command fails and onerr is None, return status. if ui object,
522 if command fails and onerr is None, return status. if ui object,
521 print error message and return status, else raise onerr object as
523 print error message and return status, else raise onerr object as
522 exception.'''
524 exception.'''
523 def py2shell(val):
525 def py2shell(val):
524 'convert python object into string that is useful to shell'
526 'convert python object into string that is useful to shell'
525 if val in (None, False):
527 if val in (None, False):
526 return '0'
528 return '0'
527 if val == True:
529 if val == True:
528 return '1'
530 return '1'
529 return str(val)
531 return str(val)
530 oldenv = {}
532 oldenv = {}
531 for k in environ:
533 for k in environ:
532 oldenv[k] = os.environ.get(k)
534 oldenv[k] = os.environ.get(k)
533 if cwd is not None:
535 if cwd is not None:
534 oldcwd = os.getcwd()
536 oldcwd = os.getcwd()
535 origcmd = cmd
537 origcmd = cmd
536 if os.name == 'nt':
538 if os.name == 'nt':
537 cmd = '"%s"' % cmd
539 cmd = '"%s"' % cmd
538 try:
540 try:
539 for k, v in environ.iteritems():
541 for k, v in environ.iteritems():
540 os.environ[k] = py2shell(v)
542 os.environ[k] = py2shell(v)
541 os.environ['HG'] = hgexecutable()
543 os.environ['HG'] = hgexecutable()
542 if cwd is not None and oldcwd != cwd:
544 if cwd is not None and oldcwd != cwd:
543 os.chdir(cwd)
545 os.chdir(cwd)
544 rc = os.system(cmd)
546 rc = os.system(cmd)
545 if sys.platform == 'OpenVMS' and rc & 1:
547 if sys.platform == 'OpenVMS' and rc & 1:
546 rc = 0
548 rc = 0
547 if rc and onerr:
549 if rc and onerr:
548 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
550 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
549 explain_exit(rc)[0])
551 explain_exit(rc)[0])
550 if errprefix:
552 if errprefix:
551 errmsg = '%s: %s' % (errprefix, errmsg)
553 errmsg = '%s: %s' % (errprefix, errmsg)
552 try:
554 try:
553 onerr.warn(errmsg + '\n')
555 onerr.warn(errmsg + '\n')
554 except AttributeError:
556 except AttributeError:
555 raise onerr(errmsg)
557 raise onerr(errmsg)
556 return rc
558 return rc
557 finally:
559 finally:
558 for k, v in oldenv.iteritems():
560 for k, v in oldenv.iteritems():
559 if v is None:
561 if v is None:
560 del os.environ[k]
562 del os.environ[k]
561 else:
563 else:
562 os.environ[k] = v
564 os.environ[k] = v
563 if cwd is not None and oldcwd != cwd:
565 if cwd is not None and oldcwd != cwd:
564 os.chdir(oldcwd)
566 os.chdir(oldcwd)
565
567
566 def checksignature(func):
568 def checksignature(func):
567 '''wrap a function with code to check for calling errors'''
569 '''wrap a function with code to check for calling errors'''
568 def check(*args, **kwargs):
570 def check(*args, **kwargs):
569 try:
571 try:
570 return func(*args, **kwargs)
572 return func(*args, **kwargs)
571 except TypeError:
573 except TypeError:
572 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
574 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
573 raise error.SignatureError
575 raise error.SignatureError
574 raise
576 raise
575
577
576 return check
578 return check
577
579
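# Illustrative sketch (not part of util.py): checksignature converts a
# TypeError raised by the call itself (wrong arity, a one-frame traceback)
# into SignatureError, while TypeErrors raised inside the function still
# propagate unchanged.
def _checksignature_sketch():
    def takes_two(a, b):
        return a + b
    wrapped = checksignature(takes_two)
    try:
        wrapped(1)                    # wrong number of arguments
    except error.SignatureError:
        pass
    assert wrapped(1, 2) == 3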
578 # os.path.lexists is not available on python2.3
580 # os.path.lexists is not available on python2.3
579 def lexists(filename):
581 def lexists(filename):
580 "test whether a file with this name exists. does not follow symlinks"
582 "test whether a file with this name exists. does not follow symlinks"
581 try:
583 try:
582 os.lstat(filename)
584 os.lstat(filename)
583 except:
585 except:
584 return False
586 return False
585 return True
587 return True
586
588
587 def rename(src, dst):
589 def rename(src, dst):
588 """forcibly rename a file"""
590 """forcibly rename a file"""
589 try:
591 try:
590 os.rename(src, dst)
592 os.rename(src, dst)
591 except OSError, err: # FIXME: check err (EEXIST ?)
593 except OSError, err: # FIXME: check err (EEXIST ?)
592
594
593 # On windows, rename to existing file is not allowed, so we
595 # On windows, rename to existing file is not allowed, so we
594 # must delete destination first. But if a file is open, unlink
596 # must delete destination first. But if a file is open, unlink
595 # schedules it for delete but does not delete it. Rename
597 # schedules it for delete but does not delete it. Rename
596 # happens immediately even for open files, so we rename
598 # happens immediately even for open files, so we rename
597 # destination to a temporary name, then delete that. Then
599 # destination to a temporary name, then delete that. Then
598 # rename is safe to do.
600 # rename is safe to do.
599 # The temporary name is chosen at random to avoid the situation
601 # The temporary name is chosen at random to avoid the situation
600 # where a file is left lying around from a previous aborted run.
602 # where a file is left lying around from a previous aborted run.
601 # The usual race condition this introduces can't be avoided as
603 # The usual race condition this introduces can't be avoided as
602 # we need the name to rename into, and not the file itself. Due
604 # we need the name to rename into, and not the file itself. Due
603 # to the nature of the operation however, any races will at worst
605 # to the nature of the operation however, any races will at worst
604 # lead to the rename failing and the current operation aborting.
606 # lead to the rename failing and the current operation aborting.
605
607
606 def tempname(prefix):
608 def tempname(prefix):
607 for tries in xrange(10):
609 for tries in xrange(10):
608 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
610 temp = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
609 if not os.path.exists(temp):
611 if not os.path.exists(temp):
610 return temp
612 return temp
611 raise IOError, (errno.EEXIST, "No usable temporary filename found")
613 raise IOError, (errno.EEXIST, "No usable temporary filename found")
612
614
613 temp = tempname(dst)
615 temp = tempname(dst)
614 os.rename(dst, temp)
616 os.rename(dst, temp)
615 os.unlink(temp)
617 os.unlink(temp)
616 os.rename(src, dst)
618 os.rename(src, dst)
617
619
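# Illustrative sketch (not part of util.py): on POSIX os.rename already
# replaces an existing destination, so the fallback above only runs on
# Windows, where the destination is first moved to a random temporary name
# and unlinked before the real rename.  tmpdir is a hypothetical directory.
def _rename_sketch(tmpdir):
    a, b = os.path.join(tmpdir, 'a'), os.path.join(tmpdir, 'b')
    for path, data in ((a, 'new'), (b, 'old')):
        f = open(path, 'w'); f.write(data); f.close()
    rename(a, b)                      # succeeds even though b existed
    assert open(b).read() == 'new'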
618 def unlink(f):
620 def unlink(f):
619 """unlink and remove the directory if it is empty"""
621 """unlink and remove the directory if it is empty"""
620 os.unlink(f)
622 os.unlink(f)
621 # try removing directories that might now be empty
623 # try removing directories that might now be empty
622 try:
624 try:
623 os.removedirs(os.path.dirname(f))
625 os.removedirs(os.path.dirname(f))
624 except OSError:
626 except OSError:
625 pass
627 pass
626
628
627 def copyfile(src, dest):
629 def copyfile(src, dest):
628 "copy a file, preserving mode and atime/mtime"
630 "copy a file, preserving mode and atime/mtime"
629 if os.path.islink(src):
631 if os.path.islink(src):
630 try:
632 try:
631 os.unlink(dest)
633 os.unlink(dest)
632 except:
634 except:
633 pass
635 pass
634 os.symlink(os.readlink(src), dest)
636 os.symlink(os.readlink(src), dest)
635 else:
637 else:
636 try:
638 try:
637 shutil.copyfile(src, dest)
639 shutil.copyfile(src, dest)
638 shutil.copystat(src, dest)
640 shutil.copystat(src, dest)
639 except shutil.Error, inst:
641 except shutil.Error, inst:
640 raise Abort(str(inst))
642 raise Abort(str(inst))
641
643
642 def copyfiles(src, dst, hardlink=None):
644 def copyfiles(src, dst, hardlink=None):
643 """Copy a directory tree using hardlinks if possible"""
645 """Copy a directory tree using hardlinks if possible"""
644
646
645 if hardlink is None:
647 if hardlink is None:
646 hardlink = (os.stat(src).st_dev ==
648 hardlink = (os.stat(src).st_dev ==
647 os.stat(os.path.dirname(dst)).st_dev)
649 os.stat(os.path.dirname(dst)).st_dev)
648
650
649 if os.path.isdir(src):
651 if os.path.isdir(src):
650 os.mkdir(dst)
652 os.mkdir(dst)
651 for name, kind in osutil.listdir(src):
653 for name, kind in osutil.listdir(src):
652 srcname = os.path.join(src, name)
654 srcname = os.path.join(src, name)
653 dstname = os.path.join(dst, name)
655 dstname = os.path.join(dst, name)
654 copyfiles(srcname, dstname, hardlink)
656 copyfiles(srcname, dstname, hardlink)
655 else:
657 else:
656 if hardlink:
658 if hardlink:
657 try:
659 try:
658 os_link(src, dst)
660 os_link(src, dst)
659 except (IOError, OSError):
661 except (IOError, OSError):
660 hardlink = False
662 hardlink = False
661 shutil.copy(src, dst)
663 shutil.copy(src, dst)
662 else:
664 else:
663 shutil.copy(src, dst)
665 shutil.copy(src, dst)
664
666
665 class path_auditor(object):
667 class path_auditor(object):
666 '''ensure that a filesystem path contains no banned components.
668 '''ensure that a filesystem path contains no banned components.
667 the following properties of a path are checked:
669 the following properties of a path are checked:
668
670
669 - under top-level .hg
671 - under top-level .hg
670 - starts at the root of a windows drive
672 - starts at the root of a windows drive
671 - contains ".."
673 - contains ".."
672 - traverses a symlink (e.g. a/symlink_here/b)
674 - traverses a symlink (e.g. a/symlink_here/b)
673 - inside a nested repository'''
675 - inside a nested repository'''
674
676
675 def __init__(self, root):
677 def __init__(self, root):
676 self.audited = set()
678 self.audited = set()
677 self.auditeddir = set()
679 self.auditeddir = set()
678 self.root = root
680 self.root = root
679
681
680 def __call__(self, path):
682 def __call__(self, path):
681 if path in self.audited:
683 if path in self.audited:
682 return
684 return
683 normpath = os.path.normcase(path)
685 normpath = os.path.normcase(path)
684 parts = splitpath(normpath)
686 parts = splitpath(normpath)
685 if (os.path.splitdrive(path)[0]
687 if (os.path.splitdrive(path)[0]
686 or parts[0].lower() in ('.hg', '.hg.', '')
688 or parts[0].lower() in ('.hg', '.hg.', '')
687 or os.pardir in parts):
689 or os.pardir in parts):
688 raise Abort(_("path contains illegal component: %s") % path)
690 raise Abort(_("path contains illegal component: %s") % path)
689 if '.hg' in path.lower():
691 if '.hg' in path.lower():
690 lparts = [p.lower() for p in parts]
692 lparts = [p.lower() for p in parts]
691 for p in '.hg', '.hg.':
693 for p in '.hg', '.hg.':
692 if p in lparts[1:]:
694 if p in lparts[1:]:
693 pos = lparts.index(p)
695 pos = lparts.index(p)
694 base = os.path.join(*parts[:pos])
696 base = os.path.join(*parts[:pos])
695 raise Abort(_('path %r is inside repo %r') % (path, base))
697 raise Abort(_('path %r is inside repo %r') % (path, base))
696 def check(prefix):
698 def check(prefix):
697 curpath = os.path.join(self.root, prefix)
699 curpath = os.path.join(self.root, prefix)
698 try:
700 try:
699 st = os.lstat(curpath)
701 st = os.lstat(curpath)
700 except OSError, err:
702 except OSError, err:
701 # EINVAL can be raised as invalid path syntax under win32.
703 # EINVAL can be raised as invalid path syntax under win32.
702 # They must be ignored so that patterns can be checked too.
704 # They must be ignored so that patterns can be checked too.
703 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
705 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
704 raise
706 raise
705 else:
707 else:
706 if stat.S_ISLNK(st.st_mode):
708 if stat.S_ISLNK(st.st_mode):
707 raise Abort(_('path %r traverses symbolic link %r') %
709 raise Abort(_('path %r traverses symbolic link %r') %
708 (path, prefix))
710 (path, prefix))
709 elif (stat.S_ISDIR(st.st_mode) and
711 elif (stat.S_ISDIR(st.st_mode) and
710 os.path.isdir(os.path.join(curpath, '.hg'))):
712 os.path.isdir(os.path.join(curpath, '.hg'))):
711 raise Abort(_('path %r is inside repo %r') %
713 raise Abort(_('path %r is inside repo %r') %
712 (path, prefix))
714 (path, prefix))
713 parts.pop()
715 parts.pop()
714 prefixes = []
716 prefixes = []
715 for n in range(len(parts)):
717 for n in range(len(parts)):
716 prefix = os.sep.join(parts)
718 prefix = os.sep.join(parts)
717 if prefix in self.auditeddir:
719 if prefix in self.auditeddir:
718 break
720 break
719 check(prefix)
721 check(prefix)
720 prefixes.append(prefix)
722 prefixes.append(prefix)
721 parts.pop()
723 parts.pop()
722
724
723 self.audited.add(path)
725 self.audited.add(path)
724 # only add prefixes to the cache after checking everything: we don't
726 # only add prefixes to the cache after checking everything: we don't
725 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
727 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
726 self.auditeddir.update(prefixes)
728 self.auditeddir.update(prefixes)
727
729
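# Illustrative sketch (not part of util.py): path_auditor rejects paths that
# escape or abuse the repository root and caches paths it has already
# approved.  repo_root is a hypothetical directory.
def _path_auditor_sketch(repo_root):
    audit = path_auditor(repo_root)
    audit('src/module.py')            # plain relative path: accepted
    try:
        audit('../etc/passwd')        # contains '..': rejected
    except Abort:
        pass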
728 def nlinks(pathname):
730 def nlinks(pathname):
729 """Return number of hardlinks for the given file."""
731 """Return number of hardlinks for the given file."""
730 return os.lstat(pathname).st_nlink
732 return os.lstat(pathname).st_nlink
731
733
732 if hasattr(os, 'link'):
734 if hasattr(os, 'link'):
733 os_link = os.link
735 os_link = os.link
734 else:
736 else:
735 def os_link(src, dst):
737 def os_link(src, dst):
736 raise OSError(0, _("Hardlinks not supported"))
738 raise OSError(0, _("Hardlinks not supported"))
737
739
738 def lookup_reg(key, name=None, scope=None):
740 def lookup_reg(key, name=None, scope=None):
739 return None
741 return None
740
742
741 if os.name == 'nt':
743 if os.name == 'nt':
742 from windows import *
744 from windows import *
743 def expand_glob(pats):
745 def expand_glob(pats):
744 '''On Windows, expand the implicit globs in a list of patterns'''
746 '''On Windows, expand the implicit globs in a list of patterns'''
745 ret = []
747 ret = []
746 for p in pats:
748 for p in pats:
747 kind, name = patkind(p, None)
749 kind, name = patkind(p, None)
748 if kind is None:
750 if kind is None:
749 globbed = glob.glob(name)
751 globbed = glob.glob(name)
750 if globbed:
752 if globbed:
751 ret.extend(globbed)
753 ret.extend(globbed)
752 continue
754 continue
753 # if we couldn't expand the glob, just keep it around
755 # if we couldn't expand the glob, just keep it around
754 ret.append(p)
756 ret.append(p)
755 return ret
757 return ret
756 else:
758 else:
757 from posix import *
759 from posix import *
758
760
759 def makelock(info, pathname):
761 def makelock(info, pathname):
760 try:
762 try:
761 return os.symlink(info, pathname)
763 return os.symlink(info, pathname)
762 except OSError, why:
764 except OSError, why:
763 if why.errno == errno.EEXIST:
765 if why.errno == errno.EEXIST:
764 raise
766 raise
765 except AttributeError: # no symlink in os
767 except AttributeError: # no symlink in os
766 pass
768 pass
767
769
768 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
770 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
769 os.write(ld, info)
771 os.write(ld, info)
770 os.close(ld)
772 os.close(ld)
771
773
772 def readlock(pathname):
774 def readlock(pathname):
773 try:
775 try:
774 return os.readlink(pathname)
776 return os.readlink(pathname)
775 except OSError, why:
777 except OSError, why:
776 if why.errno not in (errno.EINVAL, errno.ENOSYS):
778 if why.errno not in (errno.EINVAL, errno.ENOSYS):
777 raise
779 raise
778 except AttributeError: # no symlink in os
780 except AttributeError: # no symlink in os
779 pass
781 pass
780 return posixfile(pathname).read()
782 return posixfile(pathname).read()
781
783
782 def fstat(fp):
784 def fstat(fp):
783 '''stat file object that may not have fileno method.'''
785 '''stat file object that may not have fileno method.'''
784 try:
786 try:
785 return os.fstat(fp.fileno())
787 return os.fstat(fp.fileno())
786 except AttributeError:
788 except AttributeError:
787 return os.stat(fp.name)
789 return os.stat(fp.name)
788
790
789 # File system features
791 # File system features
790
792
791 def checkcase(path):
793 def checkcase(path):
792 """
794 """
793 Check whether the given path is on a case-sensitive filesystem
795 Check whether the given path is on a case-sensitive filesystem
794
796
795 Requires a path (like /foo/.hg) ending with a foldable final
797 Requires a path (like /foo/.hg) ending with a foldable final
796 directory component.
798 directory component.
797 """
799 """
798 s1 = os.stat(path)
800 s1 = os.stat(path)
799 d, b = os.path.split(path)
801 d, b = os.path.split(path)
800 p2 = os.path.join(d, b.upper())
802 p2 = os.path.join(d, b.upper())
801 if path == p2:
803 if path == p2:
802 p2 = os.path.join(d, b.lower())
804 p2 = os.path.join(d, b.lower())
803 try:
805 try:
804 s2 = os.stat(p2)
806 s2 = os.stat(p2)
805 if s2 == s1:
807 if s2 == s1:
806 return False
808 return False
807 return True
809 return True
808 except:
810 except:
809 return True
811 return True
810
812
811 _fspathcache = {}
813 _fspathcache = {}
812 def fspath(name, root):
814 def fspath(name, root):
813 '''Get name in the case stored in the filesystem
815 '''Get name in the case stored in the filesystem
814
816
815 The name is either relative to root, or it is an absolute path starting
817 The name is either relative to root, or it is an absolute path starting
816 with root. Note that this function is unnecessary, and should not be
818 with root. Note that this function is unnecessary, and should not be
817 called, for case-sensitive filesystems (simply because it's expensive).
819 called, for case-sensitive filesystems (simply because it's expensive).
818 '''
820 '''
819 # If name is absolute, make it relative
821 # If name is absolute, make it relative
820 if name.lower().startswith(root.lower()):
822 if name.lower().startswith(root.lower()):
821 l = len(root)
823 l = len(root)
822 if name[l] == os.sep or name[l] == os.altsep:
824 if name[l] == os.sep or name[l] == os.altsep:
823 l = l + 1
825 l = l + 1
824 name = name[l:]
826 name = name[l:]
825
827
826 if not os.path.exists(os.path.join(root, name)):
828 if not os.path.exists(os.path.join(root, name)):
827 return None
829 return None
828
830
829 seps = os.sep
831 seps = os.sep
830 if os.altsep:
832 if os.altsep:
831 seps = seps + os.altsep
833 seps = seps + os.altsep
832 # Protect backslashes. This gets silly very quickly.
834 # Protect backslashes. This gets silly very quickly.
833 seps.replace('\\','\\\\')
835 seps.replace('\\','\\\\')
834 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
836 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
835 dir = os.path.normcase(os.path.normpath(root))
837 dir = os.path.normcase(os.path.normpath(root))
836 result = []
838 result = []
837 for part, sep in pattern.findall(name):
839 for part, sep in pattern.findall(name):
838 if sep:
840 if sep:
839 result.append(sep)
841 result.append(sep)
840 continue
842 continue
841
843
842 if dir not in _fspathcache:
844 if dir not in _fspathcache:
843 _fspathcache[dir] = os.listdir(dir)
845 _fspathcache[dir] = os.listdir(dir)
844 contents = _fspathcache[dir]
846 contents = _fspathcache[dir]
845
847
846 lpart = part.lower()
848 lpart = part.lower()
847 for n in contents:
849 for n in contents:
848 if n.lower() == lpart:
850 if n.lower() == lpart:
849 result.append(n)
851 result.append(n)
850 break
852 break
851 else:
853 else:
852 # Cannot happen, as the file exists!
854 # Cannot happen, as the file exists!
853 result.append(part)
855 result.append(part)
854 dir = os.path.join(dir, lpart)
856 dir = os.path.join(dir, lpart)
855
857
856 return ''.join(result)
858 return ''.join(result)
857
859
858 def checkexec(path):
860 def checkexec(path):
859 """
861 """
860 Check whether the given path is on a filesystem with UNIX-like exec flags
862 Check whether the given path is on a filesystem with UNIX-like exec flags
861
863
862 Requires a directory (like /foo/.hg)
864 Requires a directory (like /foo/.hg)
863 """
865 """
864
866
865 # VFAT on some Linux versions can flip mode but it doesn't persist
867 # VFAT on some Linux versions can flip mode but it doesn't persist
866 # a FS remount. Frequently we can detect it if files are created
868 # a FS remount. Frequently we can detect it if files are created
867 # with exec bit on.
869 # with exec bit on.
868
870
869 try:
871 try:
870 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
872 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
871 fh, fn = tempfile.mkstemp("", "", path)
873 fh, fn = tempfile.mkstemp("", "", path)
872 try:
874 try:
873 os.close(fh)
875 os.close(fh)
874 m = os.stat(fn).st_mode & 0777
876 m = os.stat(fn).st_mode & 0777
875 new_file_has_exec = m & EXECFLAGS
877 new_file_has_exec = m & EXECFLAGS
876 os.chmod(fn, m ^ EXECFLAGS)
878 os.chmod(fn, m ^ EXECFLAGS)
877 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
879 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
878 finally:
880 finally:
879 os.unlink(fn)
881 os.unlink(fn)
880 except (IOError, OSError):
882 except (IOError, OSError):
881 # we don't care, the user probably won't be able to commit anyway
883 # we don't care, the user probably won't be able to commit anyway
882 return False
884 return False
883 return not (new_file_has_exec or exec_flags_cannot_flip)
885 return not (new_file_has_exec or exec_flags_cannot_flip)
884
886
885 def checklink(path):
887 def checklink(path):
886 """check whether the given path is on a symlink-capable filesystem"""
888 """check whether the given path is on a symlink-capable filesystem"""
887 # mktemp is not racy because symlink creation will fail if the
889 # mktemp is not racy because symlink creation will fail if the
888 # file already exists
890 # file already exists
889 name = tempfile.mktemp(dir=path)
891 name = tempfile.mktemp(dir=path)
890 try:
892 try:
891 os.symlink(".", name)
893 os.symlink(".", name)
892 os.unlink(name)
894 os.unlink(name)
893 return True
895 return True
894 except (OSError, AttributeError):
896 except (OSError, AttributeError):
895 return False
897 return False
896
898
897 def needbinarypatch():
899 def needbinarypatch():
898 """return True if patches should be applied in binary mode by default."""
900 """return True if patches should be applied in binary mode by default."""
899 return os.name == 'nt'
901 return os.name == 'nt'
900
902
901 def endswithsep(path):
903 def endswithsep(path):
902 '''Check path ends with os.sep or os.altsep.'''
904 '''Check path ends with os.sep or os.altsep.'''
903 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
905 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
904
906
905 def splitpath(path):
907 def splitpath(path):
906 '''Split path by os.sep.
908 '''Split path by os.sep.
907 Note that this function does not use os.altsep because this is
909 Note that this function does not use os.altsep because this is
908 an alternative of simple "xxx.split(os.sep)".
910 an alternative of simple "xxx.split(os.sep)".
909 It is recommended to use os.path.normpath() before using this
911 It is recommended to use os.path.normpath() before using this
910 function if needed.'''
912 function if needed.'''
911 return path.split(os.sep)
913 return path.split(os.sep)
912
914
913 def gui():
915 def gui():
914 '''Are we running in a GUI?'''
916 '''Are we running in a GUI?'''
915 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
917 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
916
918
917 def mktempcopy(name, emptyok=False, createmode=None):
919 def mktempcopy(name, emptyok=False, createmode=None):
918 """Create a temporary file with the same contents from name
920 """Create a temporary file with the same contents from name
919
921
920 The permission bits are copied from the original file.
922 The permission bits are copied from the original file.
921
923
922 If the temporary file is going to be truncated immediately, you
924 If the temporary file is going to be truncated immediately, you
923 can use emptyok=True as an optimization.
925 can use emptyok=True as an optimization.
924
926
925 Returns the name of the temporary file.
927 Returns the name of the temporary file.
926 """
928 """
927 d, fn = os.path.split(name)
929 d, fn = os.path.split(name)
928 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
930 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
929 os.close(fd)
931 os.close(fd)
930 # Temporary files are created with mode 0600, which is usually not
932 # Temporary files are created with mode 0600, which is usually not
931 # what we want. If the original file already exists, just copy
933 # what we want. If the original file already exists, just copy
932 # its mode. Otherwise, manually obey umask.
934 # its mode. Otherwise, manually obey umask.
933 try:
935 try:
934 st_mode = os.lstat(name).st_mode & 0777
936 st_mode = os.lstat(name).st_mode & 0777
935 except OSError, inst:
937 except OSError, inst:
936 if inst.errno != errno.ENOENT:
938 if inst.errno != errno.ENOENT:
937 raise
939 raise
938 st_mode = createmode
940 st_mode = createmode
939 if st_mode is None:
941 if st_mode is None:
940 st_mode = ~umask
942 st_mode = ~umask
941 st_mode &= 0666
943 st_mode &= 0666
942 os.chmod(temp, st_mode)
944 os.chmod(temp, st_mode)
943 if emptyok:
945 if emptyok:
944 return temp
946 return temp
945 try:
947 try:
946 try:
948 try:
947 ifp = posixfile(name, "rb")
949 ifp = posixfile(name, "rb")
948 except IOError, inst:
950 except IOError, inst:
949 if inst.errno == errno.ENOENT:
951 if inst.errno == errno.ENOENT:
950 return temp
952 return temp
951 if not getattr(inst, 'filename', None):
953 if not getattr(inst, 'filename', None):
952 inst.filename = name
954 inst.filename = name
953 raise
955 raise
954 ofp = posixfile(temp, "wb")
956 ofp = posixfile(temp, "wb")
955 for chunk in filechunkiter(ifp):
957 for chunk in filechunkiter(ifp):
956 ofp.write(chunk)
958 ofp.write(chunk)
957 ifp.close()
959 ifp.close()
958 ofp.close()
960 ofp.close()
959 except:
961 except:
960 try: os.unlink(temp)
962 try: os.unlink(temp)
961 except: pass
963 except: pass
962 raise
964 raise
963 return temp
965 return temp
964
966

class atomictempfile:
    """file-like object that atomically updates a file

    All writes will be redirected to a temporary copy of the original
    file. When rename is called, the copy is renamed to the original
    name, making the changes visible.
    """
    def __init__(self, name, mode, createmode):
        self.__name = name
        self.temp = mktempcopy(name, emptyok=('w' in mode),
                               createmode=createmode)
        self._fp = posixfile(self.temp, mode)

    def __getattr__(self, name):
        return getattr(self._fp, name)

    def rename(self):
        if not self.closed:
            self._fp.close()
            rename(self.temp, localpath(self.__name))

    def __del__(self):
        if not self.closed:
            try:
                os.unlink(self.temp)
            except: pass
            self._fp.close()
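
# A minimal usage sketch for atomictempfile (illustrative only;
# 'somefile' is a made-up path): writes go to a temporary copy and only
# replace the original once rename() is called, so readers never see a
# half-written file.
def _atomictempfile_example(path='somefile'):
    f = atomictempfile(path, 'w', None)   # createmode=None: obey umask
    f.write('new contents\n')
    f.rename()                            # atomically replaces path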

def makedirs(name, mode=None):
    """recursive directory creation with parent mode inheritance"""
    try:
        os.mkdir(name)
        if mode is not None:
            os.chmod(name, mode)
        return
    except OSError, err:
        if err.errno == errno.EEXIST:
            return
        if err.errno != errno.ENOENT:
            raise
    parent = os.path.abspath(os.path.dirname(name))
    makedirs(parent, mode)
    makedirs(name, mode)

class opener(object):
    """Open files relative to a base directory

    This class is used to hide the details of COW semantics and
    remote file access from higher level code.
    """
    def __init__(self, base, audit=True):
        self.base = base
        if audit:
            self.audit_path = path_auditor(base)
        else:
            self.audit_path = always
        self.createmode = None

    def __getattr__(self, name):
        if name == '_can_symlink':
            self._can_symlink = checklink(self.base)
            return self._can_symlink
        raise AttributeError(name)

    def _fixfilemode(self, name):
        if self.createmode is None:
            return
        os.chmod(name, self.createmode & 0666)

    def __call__(self, path, mode="r", text=False, atomictemp=False):
        self.audit_path(path)
        f = os.path.join(self.base, path)

        if not text and "b" not in mode:
            mode += "b" # for that other OS

        nlink = -1
        if mode not in ("r", "rb"):
            try:
                nlink = nlinks(f)
            except OSError:
                nlink = 0
            d = os.path.dirname(f)
            if not os.path.isdir(d):
                makedirs(d, self.createmode)
            if atomictemp:
                return atomictempfile(f, mode, self.createmode)
            if nlink > 1:
                rename(mktempcopy(f), f)
        fp = posixfile(f, mode)
        if nlink == 0:
            self._fixfilemode(f)
        return fp

    def symlink(self, src, dst):
        self.audit_path(dst)
        linkname = os.path.join(self.base, dst)
        try:
            os.unlink(linkname)
        except OSError:
            pass

        dirname = os.path.dirname(linkname)
        if not os.path.exists(dirname):
            makedirs(dirname, self.createmode)

        if self._can_symlink:
            try:
                os.symlink(src, linkname)
            except OSError, err:
                raise OSError(err.errno, _('could not symlink to %r: %s') %
                              (src, err.strerror), linkname)
        else:
            f = self(dst, "w")
            f.write(src)
            f.close()
            self._fixfilemode(dst)
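
# A minimal usage sketch for opener (illustrative only; '/tmp/repo' and
# 'data/notes.txt' are made-up paths): every access is audited and kept
# relative to the base directory, and missing parent directories are
# created on write.
def _opener_example():
    op = opener('/tmp/repo')
    f = op('data/notes.txt', 'w')   # creates /tmp/repo/data/ if needed
    f.write('hello\n')
    f.close()
    return op('data/notes.txt').read()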

class chunkbuffer(object):
    """Allow arbitrary sized chunks of data to be efficiently read from an
    iterator over chunks of arbitrary size."""

    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks.
        targetsize is how big a buffer to try to maintain."""
        self.iter = iter(in_iter)
        self.buf = ''
        self.targetsize = 2**16

    def read(self, l):
        """Read L bytes of data from the iterator of chunks of data.
        Returns less than L bytes if the iterator runs dry."""
        if l > len(self.buf) and self.iter:
            # Clamp to a multiple of self.targetsize
            targetsize = max(l, self.targetsize)
            collector = cStringIO.StringIO()
            collector.write(self.buf)
            collected = len(self.buf)
            for chunk in self.iter:
                collector.write(chunk)
                collected += len(chunk)
                if collected >= targetsize:
                    break
            if collected < targetsize:
                self.iter = False
            self.buf = collector.getvalue()
        if len(self.buf) == l:
            s, self.buf = str(self.buf), ''
        else:
            s, self.buf = self.buf[:l], buffer(self.buf, l)
        return s
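
# A minimal sketch of chunkbuffer: it turns an iterator of arbitrarily
# sized chunks into a file-like read(n) interface.
def _chunkbuffer_example():
    cb = chunkbuffer(iter(['abc', 'defgh', 'ij']))
    return cb.read(4), cb.read(6)   # ('abcd', 'efghij')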

def filechunkiter(f, size=65536, limit=None):
    """Create a generator that produces the data in the file size
    (default 65536) bytes at a time, up to optional limit (default is
    to read all data). Chunks may be less than size bytes if the
    chunk is the last chunk in the file, or the file is a socket or
    some other type of file that sometimes reads less data than is
    requested."""
    assert size >= 0
    assert limit is None or limit >= 0
    while True:
        if limit is None: nbytes = size
        else: nbytes = min(limit, size)
        s = nbytes and f.read(nbytes)
        if not s: break
        if limit: limit -= len(s)
        yield s
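
# A minimal sketch of filechunkiter (illustrative only; 'somefile' is a
# made-up path): the file is streamed in bounded chunks instead of
# being read into memory at once.
def _filechunkiter_example(path='somefile'):
    f = open(path, 'rb')
    total = 0
    for chunk in filechunkiter(f, size=8192):
        total += len(chunk)
    f.close()
    return total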

def makedate():
    lt = time.localtime()
    if lt[8] == 1 and time.daylight:
        tz = time.altzone
    else:
        tz = time.timezone
    return time.mktime(lt), tz

def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. The time zone is appended only if
    the format contains the %1 (signed hours) or %2 (minutes)
    placeholders."""
    t, tz = date or makedate()
    if "%1" in format or "%2" in format:
        sign = (tz > 0) and "-" or "+"
        minutes = abs(tz) / 60
        format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
        format = format.replace("%2", "%02d" % (minutes % 60))
    s = time.strftime(format, time.gmtime(float(t) - tz))
    return s

def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into an ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')
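
# A minimal sketch of the date helpers: makedate() produces the
# (unixtime, tzoffset) pair that datestr() and shortdate() format, e.g.
# 'Mon Jul 06 12:00:00 2009 +0200' and '2009-07-06' (values will vary).
def _date_example():
    when = makedate()
    return datestr(when), shortdate(when)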

def strdate(string, format, defaults=[]):
    """parse a localized time string and return a (unixtime, offset) tuple.
    if the string cannot be parsed, ValueError is raised."""
    def timezone(string):
        tz = string.split()[-1]
        if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
            sign = (tz[0] == "+") and 1 or -1
            hours = int(tz[1:3])
            minutes = int(tz[3:5])
            return -sign * (hours * 60 + minutes) * 60
        if tz == "GMT" or tz == "UTC":
            return 0
        return None

    # NOTE: unixtime = localunixtime + offset
    offset, date = timezone(string), string
    if offset != None:
        date = " ".join(string.split()[:-1])

    # add missing elements from defaults
    for part in defaults:
        found = [True for p in part if ("%"+p) in format]
        if not found:
            date += "@" + defaults[part]
            format += "@%" + part[0]

    timetuple = time.strptime(date, format)
    localunixtime = int(calendar.timegm(timetuple))
    if offset is None:
        # local timezone
        unixtime = int(time.mktime(timetuple))
        offset = unixtime - localunixtime
    else:
        unixtime = localunixtime + offset
    return unixtime, offset

def parsedate(date, formats=None, defaults=None):
    """parse a localized date/time string and return a (unixtime, offset) tuple.

    The date may be a "unixtime offset" string or in one of the specified
    formats. If the date already is a (unixtime, offset) tuple, it is returned.
    """
    if not date:
        return 0, 0
    if isinstance(date, tuple) and len(date) == 2:
        return date
    if not formats:
        formats = defaultdateformats
    date = date.strip()
    try:
        when, offset = map(int, date.split(' '))
    except ValueError:
        # fill out defaults
        if not defaults:
            defaults = {}
        now = makedate()
        for part in "d mb yY HI M S".split():
            if part not in defaults:
                if part[0] in "HMS":
                    defaults[part] = "00"
                else:
                    defaults[part] = datestr(now, "%" + part[0])

        for format in formats:
            try:
                when, offset = strdate(date, format, defaults)
            except (ValueError, OverflowError):
                pass
            else:
                break
        else:
            raise Abort(_('invalid date: %r ') % date)
    # validate explicit (probably user-specified) date and
    # time zone offset. values must fit in signed 32 bits for
    # current 32-bit linux runtimes. timezones go from UTC-12
    # to UTC+14
    if abs(when) > 0x7fffffff:
        raise Abort(_('date exceeds 32 bits: %d') % when)
    if offset < -50400 or offset > 43200:
        raise Abort(_('impossible time zone offset: %d') % offset)
    return when, offset
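
# A minimal sketch of parsedate: both raw "unixtime offset" strings and
# human-readable dates (here assuming '%Y-%m-%d' is among the default
# formats) come back as a (unixtime, offset) tuple.
def _parsedate_example():
    assert parsedate('1000000000 0') == (1000000000, 0)
    return parsedate('2009-07-06')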

def matchdate(date):
    """Return a function that matches a given date match specifier

    Formats include:

    '{date}' match a given date to the accuracy provided

    '<{date}' on or before a given date

    '>{date}' on or after a given date

    """

    def lower(date):
        d = dict(mb="1", d="1")
        return parsedate(date, extendeddateformats, d)[0]

    def upper(date):
        d = dict(mb="12", HI="23", M="59", S="59")
        for days in "31 30 29".split():
            try:
                d["d"] = days
                return parsedate(date, extendeddateformats, d)[0]
            except:
                pass
        d["d"] = "28"
        return parsedate(date, extendeddateformats, d)[0]

    date = date.strip()
    if date[0] == "<":
        when = upper(date[1:])
        return lambda x: x <= when
    elif date[0] == ">":
        when = lower(date[1:])
        return lambda x: x >= when
    elif date[0] == "-":
        try:
            days = int(date[1:])
        except ValueError:
            raise Abort(_("invalid day spec: %s") % date[1:])
        when = makedate()[0] - days * 3600 * 24
        return lambda x: x >= when
    elif " to " in date:
        a, b = date.split(" to ")
        start, stop = lower(a), upper(b)
        return lambda x: x >= start and x <= stop
    else:
        start, stop = lower(date), upper(date)
        return lambda x: x >= start and x <= stop
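
# A minimal sketch of matchdate: the returned predicate tests a plain
# timestamp against the date specifier.
def _matchdate_example():
    m = matchdate('>2008-01-01')
    return m(parsedate('2009-07-06')[0])   # True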

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f+1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user

def email(author):
    '''get email of author.'''
    r = author.find('>')
    if r == -1: r = None
    return author[author.find('<')+1:r]

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    if len(text) <= maxlength:
        return text
    else:
        return "%s..." % (text[:maxlength-3])
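
# A minimal sketch of the author helpers (the address is a made-up
# example): shortuser() trims an author string down to a login-style
# name, email() extracts just the address.
def _author_example():
    author = 'Jane Doe <jane.doe@example.com>'
    return shortuser(author), email(author)   # ('jane', 'jane.doe@example.com')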

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
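
# A minimal sketch of walkrepos (illustrative only; '/srv/repos' is a
# made-up directory): yields the root of every Mercurial repository
# found below the given path.
def _walkrepos_example(path='/srv/repos'):
    return list(walkrepos(path, followsym=True, recurse=True))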

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p: continue
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1<<30, _('%.0f GB')),
        (10, 1<<30, _('%.1f GB')),
        (1, 1<<30, _('%.2f GB')),
        (100, 1<<20, _('%.0f MB')),
        (10, 1<<20, _('%.1f MB')),
        (1, 1<<20, _('%.2f MB')),
        (100, 1<<10, _('%.0f KB')),
        (10, 1<<10, _('%.1f KB')),
        (1, 1<<10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
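
# A minimal sketch of bytecount: the unit and precision are chosen so
# the formatted number stays short and readable.
def _bytecount_example():
    return bytecount(1024), bytecount(15 * 1024 * 1024)   # '1.00 KB', '15.0 MB'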

def drop_scheme(scheme, path):
    sc = scheme + ':'
    if path.startswith(sc):
        path = path[len(sc):]
        if path.startswith('//'):
            path = path[2:]
    return path

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

def termwidth():
    if 'COLUMNS' in os.environ:
        try:
            return int(os.environ['COLUMNS'])
        except ValueError:
            pass
    try:
        import termios, array, fcntl
        for dev in (sys.stdout, sys.stdin):
            try:
                fd = dev.fileno()
                if not os.isatty(fd):
                    continue
                arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
                return array.array('h', arri)[1]
            except ValueError:
                pass
    except ImportError:
        pass
    return 80

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line