Correct a bug on date formats with '>' or '<' accompanied by space characters.
Justin Peng
r7953:8c6f823e default
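The change concerns date specifiers that begin with a comparison character: hg accepts forms such as ">2006-02-01" (on or after) and "<2006-02-01" (on or before), and a space after the '>' or '<' previously broke parsing. The affected date-handling code (likely matchdate in util.py) lies past the end of the hunk shown below, so what follows is only a minimal sketch of the parsing idea, not the actual patch; the helper name _parse_datespec and its return convention are invented for illustration.

# Illustrative sketch, not the changeset itself: strip whitespace after a
# leading '>' or '<' so that "> 2006-02-01" behaves like ">2006-02-01".
# _parse_datespec is a hypothetical helper, not a Mercurial API.
def _parse_datespec(spec):
    spec = spec.strip()
    if spec and spec[0] in '<>':
        # keep the comparison character, drop any spaces before the date
        return spec[0], spec[1:].strip()
    return '', spec

assert _parse_datespec('>2006-02-01') == ('>', '2006-02-01')
assert _parse_datespec('> 2006-02-01') == ('>', '2006-02-01')
assert _parse_datespec('< 10:30') == ('<', '10:30')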
@@ -1,1502 +1,1503
"""
util.py - Mercurial utility functions and platform-specific implementations

Copyright 2005 K. Thananchayan <thananck@yahoo.com>
Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>

This software may be used and distributed according to the terms
of the GNU General Public License, incorporated herein by reference.

This contains helper routines that are independent of the SCM core and hide
platform-specific details from the core.
"""

from i18n import _
import cStringIO, errno, re, shutil, sys, tempfile, traceback, error
import os, stat, threading, time, calendar, ConfigParser, glob, osutil
import imp

# Python compatibility

try:
    set = set
    frozenset = frozenset
except NameError:
    from sets import Set as set, ImmutableSet as frozenset

_md5 = None
def md5(s):
    global _md5
    if _md5 is None:
        try:
            import hashlib
            _md5 = hashlib.md5
        except ImportError:
            import md5
            _md5 = md5.md5
    return _md5(s)

_sha1 = None
def sha1(s):
    global _sha1
    if _sha1 is None:
        try:
            import hashlib
            _sha1 = hashlib.sha1
        except ImportError:
            import sha
            _sha1 = sha.sha
    return _sha1(s)

try:
    import subprocess
    subprocess.Popen # trigger ImportError early
    closefds = os.name == 'posix'
    def popen2(cmd, mode='t', bufsize=-1):
        p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                             close_fds=closefds,
                             stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        return p.stdin, p.stdout
    def popen3(cmd, mode='t', bufsize=-1):
        p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                             close_fds=closefds,
                             stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        return p.stdin, p.stdout, p.stderr
    def Popen3(cmd, capturestderr=False, bufsize=-1):
        stderr = capturestderr and subprocess.PIPE or None
        p = subprocess.Popen(cmd, shell=True, bufsize=bufsize,
                             close_fds=closefds,
                             stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                             stderr=stderr)
        p.fromchild = p.stdout
        p.tochild = p.stdin
        p.childerr = p.stderr
        return p
except ImportError:
    subprocess = None
    from popen2 import Popen3
    popen2 = os.popen2
    popen3 = os.popen3


def version():
    """Return version information if available."""
    try:
        import __version__
        return __version__.version
    except ImportError:
        return 'unknown'

# used by parsedate
defaultdateformats = (
    '%Y-%m-%d %H:%M:%S',
    '%Y-%m-%d %I:%M:%S%p',
    '%Y-%m-%d %H:%M',
    '%Y-%m-%d %I:%M%p',
    '%Y-%m-%d',
    '%m-%d',
    '%m/%d',
    '%m/%d/%y',
    '%m/%d/%Y',
    '%a %b %d %H:%M:%S %Y',
    '%a %b %d %I:%M:%S%p %Y',
    '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
    '%b %d %H:%M:%S %Y',
    '%b %d %I:%M:%S%p %Y',
    '%b %d %H:%M:%S',
    '%b %d %I:%M:%S%p',
    '%b %d %H:%M',
    '%b %d %I:%M%p',
    '%b %d %Y',
    '%b %d',
    '%H:%M:%S',
    '%I:%M:%S%p',
    '%H:%M',
    '%I:%M%p',
)

extendeddateformats = defaultdateformats + (
    "%Y",
    "%Y-%m",
    "%b",
    "%b %Y",
    )

# differences from SafeConfigParser:
# - case-sensitive keys
# - allows values that are not strings (this means that you may not
#   be able to save the configuration to a file)
class configparser(ConfigParser.SafeConfigParser):
    def optionxform(self, optionstr):
        return optionstr

    def set(self, section, option, value):
        return ConfigParser.ConfigParser.set(self, section, option, value)

    def _interpolate(self, section, option, rawval, vars):
        if not isinstance(rawval, basestring):
            return rawval
        return ConfigParser.SafeConfigParser._interpolate(self, section,
                                                          option, rawval, vars)

def cachefunc(func):
    '''cache the result of function calls'''
    # XXX doesn't handle keyword args
    cache = {}
    if func.func_code.co_argcount == 1:
        # we gain a small amount of time because
        # we don't need to pack/unpack the list
        def f(arg):
            if arg not in cache:
                cache[arg] = func(arg)
            return cache[arg]
    else:
        def f(*args):
            if args not in cache:
                cache[args] = func(*args)
            return cache[args]

    return f

def pipefilter(s, cmd):
    '''filter string S through command CMD, returning its output'''
    (pin, pout) = popen2(cmd, 'b')
    def writer():
        try:
            pin.write(s)
            pin.close()
        except IOError, inst:
            if inst.errno != errno.EPIPE:
                raise

    # we should use select instead on UNIX, but this will work on most
    # systems, including Windows
    w = threading.Thread(target=writer)
    w.start()
    f = pout.read()
    pout.close()
    w.join()
    return f

def tempfilter(s, cmd):
    '''filter string S through a pair of temporary files with CMD.
    CMD is used as a template to create the real command to be run,
    with the strings INFILE and OUTFILE replaced by the real names of
    the temporary files generated.'''
    inname, outname = None, None
    try:
        infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
        fp = os.fdopen(infd, 'wb')
        fp.write(s)
        fp.close()
        outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
        os.close(outfd)
        cmd = cmd.replace('INFILE', inname)
        cmd = cmd.replace('OUTFILE', outname)
        code = os.system(cmd)
        if sys.platform == 'OpenVMS' and code & 1:
            code = 0
        if code: raise Abort(_("command '%s' failed: %s") %
                             (cmd, explain_exit(code)))
        return open(outname, 'rb').read()
    finally:
        try:
            if inname: os.unlink(inname)
        except: pass
        try:
            if outname: os.unlink(outname)
        except: pass

filtertable = {
    'tempfile:': tempfilter,
    'pipe:': pipefilter,
    }

def filter(s, cmd):
    "filter a string through a command that transforms its input to its output"
    for name, fn in filtertable.iteritems():
        if cmd.startswith(name):
            return fn(s, cmd[len(name):].lstrip())
    return pipefilter(s, cmd)

def binary(s):
    """return true if a string is binary data"""
    if s and '\0' in s:
        return True
    return False

def unique(g):
    """return the unique elements of iterable g"""
    return dict.fromkeys(g).keys()

def sort(l):
    if not isinstance(l, list):
        l = list(l)
    l.sort()
    return l

def increasingchunks(source, min=1024, max=65536):
    '''return no less than min bytes per chunk while data remains,
    doubling min after each chunk until it reaches max'''
    def log2(x):
        if not x:
            return 0
        i = 0
        while x:
            x >>= 1
            i += 1
        return i - 1

    buf = []
    blen = 0
    for chunk in source:
        buf.append(chunk)
        blen += len(chunk)
        if blen >= min:
            if min < max:
                min = min << 1
                nmin = 1 << log2(blen)
                if nmin > min:
                    min = nmin
                if min > max:
                    min = max
            yield ''.join(buf)
            blen = 0
            buf = []
    if buf:
        yield ''.join(buf)

Abort = error.Abort

def always(fn): return True
def never(fn): return False

def patkind(name, default):
    """Split a string into an optional pattern kind prefix and the
    actual pattern."""
    for prefix in 're', 'glob', 'path', 'relglob', 'relpath', 'relre':
        if name.startswith(prefix + ':'): return name.split(':', 1)
    return default, name

def globre(pat, head='^', tail='$'):
    "convert a glob pattern into a regexp"
    i, n = 0, len(pat)
    res = ''
    group = 0
    def peek(): return i < n and pat[i]
    while i < n:
        c = pat[i]
        i = i+1
        if c == '*':
            if peek() == '*':
                i += 1
                res += '.*'
            else:
                res += '[^/]*'
        elif c == '?':
            res += '.'
        elif c == '[':
            j = i
            if j < n and pat[j] in '!]':
                j += 1
            while j < n and pat[j] != ']':
                j += 1
            if j >= n:
                res += '\\['
            else:
                stuff = pat[i:j].replace('\\','\\\\')
                i = j + 1
                if stuff[0] == '!':
                    stuff = '^' + stuff[1:]
                elif stuff[0] == '^':
                    stuff = '\\' + stuff
                res = '%s[%s]' % (res, stuff)
        elif c == '{':
            group += 1
            res += '(?:'
        elif c == '}' and group:
            res += ')'
            group -= 1
        elif c == ',' and group:
            res += '|'
        elif c == '\\':
            p = peek()
            if p:
                i += 1
                res += re.escape(p)
            else:
                res += re.escape(c)
        else:
            res += re.escape(c)
    return head + res + tail

_globchars = {'[': 1, '{': 1, '*': 1, '?': 1}

def pathto(root, n1, n2):
    '''return the relative path from one place to another.
    root should use os.sep to separate directories
    n1 should use os.sep to separate directories
    n2 should use "/" to separate directories
    returns an os.sep-separated path.

    If n1 is a relative path, it's assumed it's
    relative to root.
    n2 should always be relative to root.
    '''
    if not n1: return localpath(n2)
    if os.path.isabs(n1):
        if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
            return os.path.join(root, localpath(n2))
        n2 = '/'.join((pconvert(root), n2))
    a, b = splitpath(n1), n2.split('/')
    a.reverse()
    b.reverse()
    while a and b and a[-1] == b[-1]:
        a.pop()
        b.pop()
    b.reverse()
    return os.sep.join((['..'] * len(a)) + b) or '.'

def canonpath(root, cwd, myname):
    """return the canonical path of myname, given cwd and root"""
    if root == os.sep:
        rootsep = os.sep
    elif endswithsep(root):
        rootsep = root
    else:
        rootsep = root + os.sep
    name = myname
    if not os.path.isabs(name):
        name = os.path.join(root, cwd, name)
    name = os.path.normpath(name)
    audit_path = path_auditor(root)
    if name != rootsep and name.startswith(rootsep):
        name = name[len(rootsep):]
        audit_path(name)
        return pconvert(name)
    elif name == root:
        return ''
    else:
        # Determine whether `name' is in the hierarchy at or beneath `root',
        # by iterating name=dirname(name) until that causes no change (can't
        # check name == '/', because that doesn't work on windows). For each
        # `name', compare dev/inode numbers. If they match, the list `rel'
        # holds the reversed list of components making up the relative file
        # name we want.
        root_st = os.stat(root)
        rel = []
        while True:
            try:
                name_st = os.stat(name)
            except OSError:
                break
            if samestat(name_st, root_st):
                if not rel:
                    # name was actually the same as root (maybe a symlink)
                    return ''
                rel.reverse()
                name = os.path.join(*rel)
                audit_path(name)
                return pconvert(name)
            dirname, basename = os.path.split(name)
            rel.append(basename)
            if dirname == name:
                break
            name = dirname

        raise Abort('%s not under root' % myname)

def matcher(canonroot, cwd='', names=[], inc=[], exc=[], src=None, dflt_pat='glob'):
    """build a function to match a set of file patterns

    arguments:
    canonroot - the canonical root of the tree you're matching against
    cwd - the current working directory, if relevant
    names - patterns to find
    inc - patterns to include
    exc - patterns to exclude
    dflt_pat - if a pattern in names has no explicit type, assume this one
    src - where these patterns came from (e.g. .hgignore)

    a pattern is one of:
    'glob:<glob>' - a glob relative to cwd
    're:<regexp>' - a regular expression
    'path:<path>' - a path relative to canonroot
    'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs)
    'relpath:<path>' - a path relative to cwd
    'relre:<regexp>' - a regexp that doesn't have to match the start of a name
    '<something>' - one of the cases above, selected by the dflt_pat argument

    returns:
    a 3-tuple containing
    - list of roots (places where one should start a recursive walk of the fs);
      this often matches the explicit non-pattern names passed in, but also
      includes the initial part of glob: patterns that has no glob characters
    - a bool match(filename) function
    - a bool indicating if any patterns were passed in
    """

    # a common case: no patterns at all
    if not names and not inc and not exc:
        return [], always, False

    def contains_glob(name):
        for c in name:
            if c in _globchars: return True
        return False

    def regex(kind, name, tail):
        '''convert a pattern into a regular expression'''
        if not name:
            return ''
        if kind == 're':
            return name
        elif kind == 'path':
            return '^' + re.escape(name) + '(?:/|$)'
        elif kind == 'relglob':
            return globre(name, '(?:|.*/)', tail)
        elif kind == 'relpath':
            return re.escape(name) + '(?:/|$)'
        elif kind == 'relre':
            if name.startswith('^'):
                return name
            return '.*' + name
        return globre(name, '', tail)

    def matchfn(pats, tail):
        """build a matching function from a set of patterns"""
        if not pats:
            return
        try:
            pat = '(?:%s)' % '|'.join([regex(k, p, tail) for (k, p) in pats])
            if len(pat) > 20000:
                raise OverflowError()
            return re.compile(pat).match
        except OverflowError:
            # We're using a Python with a tiny regex engine and we
            # made it explode, so we'll divide the pattern list in two
            # until it works
            l = len(pats)
            if l < 2:
                raise
            a, b = matchfn(pats[:l//2], tail), matchfn(pats[l//2:], tail)
            return lambda s: a(s) or b(s)
        except re.error:
            for k, p in pats:
                try:
                    re.compile('(?:%s)' % regex(k, p, tail))
                except re.error:
                    if src:
                        raise Abort("%s: invalid pattern (%s): %s" %
                                    (src, k, p))
                    else:
                        raise Abort("invalid pattern (%s): %s" % (k, p))
            raise Abort("invalid pattern")

    def globprefix(pat):
        '''return the non-glob prefix of a path, e.g. foo/* -> foo'''
        root = []
        for p in pat.split('/'):
            if contains_glob(p): break
            root.append(p)
        return '/'.join(root) or '.'

    def normalizepats(names, default):
        pats = []
        roots = []
        anypats = False
        for kind, name in [patkind(p, default) for p in names]:
            if kind in ('glob', 'relpath'):
                name = canonpath(canonroot, cwd, name)
            elif kind in ('relglob', 'path'):
                name = normpath(name)

            pats.append((kind, name))

            if kind in ('glob', 're', 'relglob', 'relre'):
                anypats = True

            if kind == 'glob':
                root = globprefix(name)
                roots.append(root)
            elif kind in ('relpath', 'path'):
                roots.append(name or '.')
            elif kind == 'relglob':
                roots.append('.')
        return roots, pats, anypats

    roots, pats, anypats = normalizepats(names, dflt_pat)

    patmatch = matchfn(pats, '$') or always
    incmatch = always
    if inc:
        dummy, inckinds, dummy = normalizepats(inc, 'glob')
        incmatch = matchfn(inckinds, '(?:/|$)')
    excmatch = never
    if exc:
        dummy, exckinds, dummy = normalizepats(exc, 'glob')
        excmatch = matchfn(exckinds, '(?:/|$)')

    if not names and inc and not exc:
        # common case: hgignore patterns
        match = incmatch
    else:
        match = lambda fn: incmatch(fn) and not excmatch(fn) and patmatch(fn)

    return (roots, match, (inc or exc or anypats) and True)

_hgexecutable = None

def main_is_frozen():
    """return True if we are a frozen executable.

    The code supports py2exe (most common, Windows only) and tools/freeze
    (portable, not much used).
    """
    return (hasattr(sys, "frozen") or # new py2exe
            hasattr(sys, "importers") or # old py2exe
            imp.is_frozen("__main__")) # tools/freeze

def hgexecutable():
    """return location of the 'hg' executable.

    Defaults to $HG or 'hg' in the search path.
    """
    if _hgexecutable is None:
        hg = os.environ.get('HG')
        if hg:
            set_hgexecutable(hg)
        elif main_is_frozen():
            set_hgexecutable(sys.executable)
        else:
            set_hgexecutable(find_exe('hg') or 'hg')
    return _hgexecutable

def set_hgexecutable(path):
    """set location of the 'hg' executable"""
    global _hgexecutable
    _hgexecutable = path

def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if command fails and onerr is None, return status. if ui object,
    print error message and return status, else raise onerr object as
    exception.'''
    def py2shell(val):
        'convert python object into string that is useful to shell'
        if val in (None, False):
            return '0'
        if val == True:
            return '1'
        return str(val)
    oldenv = {}
    for k in environ:
        oldenv[k] = os.environ.get(k)
    if cwd is not None:
        oldcwd = os.getcwd()
    origcmd = cmd
    if os.name == 'nt':
        cmd = '"%s"' % cmd
    try:
        for k, v in environ.iteritems():
            os.environ[k] = py2shell(v)
        os.environ['HG'] = hgexecutable()
        if cwd is not None and oldcwd != cwd:
            os.chdir(cwd)
        rc = os.system(cmd)
        if sys.platform == 'OpenVMS' and rc & 1:
            rc = 0
        if rc and onerr:
            errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
                                explain_exit(rc)[0])
            if errprefix:
                errmsg = '%s: %s' % (errprefix, errmsg)
            try:
                onerr.warn(errmsg + '\n')
            except AttributeError:
                raise onerr(errmsg)
        return rc
    finally:
        for k, v in oldenv.iteritems():
            if v is None:
                del os.environ[k]
            else:
                os.environ[k] = v
        if cwd is not None and oldcwd != cwd:
            os.chdir(oldcwd)

def checksignature(func):
    '''wrap a function with code to check for calling errors'''
    def check(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except TypeError:
            if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
                raise error.SignatureError
            raise

    return check

# os.path.lexists is not available on python2.3
def lexists(filename):
    "test whether a file with this name exists. does not follow symlinks"
    try:
        os.lstat(filename)
    except:
        return False
    return True

def rename(src, dst):
    """forcibly rename a file"""
    try:
        os.rename(src, dst)
    except OSError, err: # FIXME: check err (EEXIST ?)
        # on windows, rename to existing file is not allowed, so we
        # must delete destination first. but if file is open, unlink
        # schedules it for delete but does not delete it. rename
        # happens immediately even for open files, so we rename
        # destination to a temporary name, then delete that. then
        # rename is safe to do.
        temp = dst + "-force-rename"
        os.rename(dst, temp)
        os.unlink(temp)
        os.rename(src, dst)

def unlink(f):
    """unlink and remove the directory if it is empty"""
    os.unlink(f)
    # try removing directories that might now be empty
    try:
        os.removedirs(os.path.dirname(f))
    except OSError:
        pass

def copyfile(src, dest):
    "copy a file, preserving mode and atime/mtime"
    if os.path.islink(src):
        try:
            os.unlink(dest)
        except:
            pass
        os.symlink(os.readlink(src), dest)
    else:
        try:
            shutil.copyfile(src, dest)
            shutil.copystat(src, dest)
        except shutil.Error, inst:
            raise Abort(str(inst))

def copyfiles(src, dst, hardlink=None):
    """Copy a directory tree using hardlinks if possible"""

    if hardlink is None:
        hardlink = (os.stat(src).st_dev ==
                    os.stat(os.path.dirname(dst)).st_dev)

    if os.path.isdir(src):
        os.mkdir(dst)
        for name, kind in osutil.listdir(src):
            srcname = os.path.join(src, name)
            dstname = os.path.join(dst, name)
            copyfiles(srcname, dstname, hardlink)
    else:
        if hardlink:
            try:
                os_link(src, dst)
            except (IOError, OSError):
                hardlink = False
                shutil.copy(src, dst)
        else:
            shutil.copy(src, dst)

class path_auditor(object):
    '''ensure that a filesystem path contains no banned components.
    the following properties of a path are checked:

    - under top-level .hg
    - starts at the root of a windows drive
    - contains ".."
    - traverses a symlink (e.g. a/symlink_here/b)
    - inside a nested repository'''

    def __init__(self, root):
        self.audited = set()
        self.auditeddir = set()
        self.root = root

    def __call__(self, path):
        if path in self.audited:
            return
        normpath = os.path.normcase(path)
        parts = splitpath(normpath)
        if (os.path.splitdrive(path)[0]
            or parts[0].lower() in ('.hg', '.hg.', '')
            or os.pardir in parts):
            raise Abort(_("path contains illegal component: %s") % path)
        if '.hg' in path.lower():
            lparts = [p.lower() for p in parts]
            for p in '.hg', '.hg.':
                if p in lparts[1:]:
                    pos = lparts.index(p)
                    base = os.path.join(*parts[:pos])
                    raise Abort(_('path %r is inside repo %r') % (path, base))
        def check(prefix):
            curpath = os.path.join(self.root, prefix)
            try:
                st = os.lstat(curpath)
            except OSError, err:
                # EINVAL can be raised as invalid path syntax under win32.
                # It must be ignored because patterns can be checked too.
                if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
                    raise
            else:
                if stat.S_ISLNK(st.st_mode):
                    raise Abort(_('path %r traverses symbolic link %r') %
                                (path, prefix))
                elif (stat.S_ISDIR(st.st_mode) and
                      os.path.isdir(os.path.join(curpath, '.hg'))):
                    raise Abort(_('path %r is inside repo %r') %
                                (path, prefix))
        parts.pop()
        prefixes = []
        for n in range(len(parts)):
            prefix = os.sep.join(parts)
            if prefix in self.auditeddir:
                break
            check(prefix)
            prefixes.append(prefix)
            parts.pop()

        self.audited.add(path)
        # only add prefixes to the cache after checking everything: we don't
        # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
        self.auditeddir.update(prefixes)

if os.name == 'nt':
    from windows import *
    def expand_glob(pats):
        '''On Windows, expand the implicit globs in a list of patterns'''
        ret = []
        for p in pats:
            kind, name = patkind(p, None)
            if kind is None:
                globbed = glob.glob(name)
                if globbed:
                    ret.extend(globbed)
                    continue
            # if we couldn't expand the glob, just keep it around
            ret.append(p)
        return ret
else:
    from posix import *

def makelock(info, pathname):
    try:
        return os.symlink(info, pathname)
    except OSError, why:
        if why.errno == errno.EEXIST:
            raise
    except AttributeError: # no symlink in os
        pass

    ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
    os.write(ld, info)
    os.close(ld)

def readlock(pathname):
    try:
        return os.readlink(pathname)
    except OSError, why:
        if why.errno not in (errno.EINVAL, errno.ENOSYS):
            raise
    except AttributeError: # no symlink in os
        pass
    return posixfile(pathname).read()

def nlinks(pathname):
    """Return number of hardlinks for the given file."""
    return os.lstat(pathname).st_nlink

if hasattr(os, 'link'):
    os_link = os.link
else:
    def os_link(src, dst):
        raise OSError(0, _("Hardlinks not supported"))

def fstat(fp):
    '''stat file object that may not have fileno method.'''
    try:
        return os.fstat(fp.fileno())
    except AttributeError:
        return os.stat(fp.name)

# File system features

def checkcase(path):
    """
    Check whether the given path is on a case-sensitive filesystem

    Requires a path (like /foo/.hg) ending with a foldable final
    directory component.
    """
    s1 = os.stat(path)
    d, b = os.path.split(path)
    p2 = os.path.join(d, b.upper())
    if path == p2:
        p2 = os.path.join(d, b.lower())
    try:
        s2 = os.stat(p2)
        if s2 == s1:
            return False
        return True
    except:
        return True

_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).
    '''
    # If name is absolute, make it relative
    if name.lower().startswith(root.lower()):
        l = len(root)
        if name[l] == os.sep or name[l] == os.altsep:
            l = l + 1
        name = name[l:]

    if not os.path.exists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    seps.replace('\\','\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normcase(os.path.normpath(root))
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        lpart = part.lower()
        for n in contents:
            if n.lower() == lpart:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, lpart)

    return ''.join(result)

def checkexec(path):
    """
    Check whether the given path is on a filesystem with UNIX-like exec flags

    Requires a directory (like /foo/.hg)
    """

    # VFAT on some Linux versions can flip mode but it doesn't persist
    # a FS remount. Frequently we can detect it if files are created
    # with exec bit on.

    try:
        EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
        fh, fn = tempfile.mkstemp("", "", path)
        try:
            os.close(fh)
            m = os.stat(fn).st_mode & 0777
            new_file_has_exec = m & EXECFLAGS
            os.chmod(fn, m ^ EXECFLAGS)
            exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
        finally:
            os.unlink(fn)
    except (IOError, OSError):
        # we don't care, the user probably won't be able to commit anyway
        return False
    return not (new_file_has_exec or exec_flags_cannot_flip)

def checklink(path):
    """check whether the given path is on a symlink-capable filesystem"""
    # mktemp is not racy because symlink creation will fail if the
    # file already exists
    name = tempfile.mktemp(dir=path)
    try:
        os.symlink(".", name)
        os.unlink(name)
        return True
    except (OSError, AttributeError):
        return False

def needbinarypatch():
    """return True if patches should be applied in binary mode by default."""
    return os.name == 'nt'

def endswithsep(path):
    '''Check whether path ends with os.sep or os.altsep.'''
    return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)

def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because it is
    meant as a simple alternative to "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)

def gui():
    '''Are we running in a GUI?'''
    return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")

def lookup_reg(key, name=None, scope=None):
    return None

def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name

    The permission bits are copied from the original file.

    If the temporary file is going to be truncated immediately, you
    can use emptyok=True as an optimization.

    Returns the name of the temporary file.
    """
    d, fn = os.path.split(name)
    fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
    os.close(fd)
    # Temporary files are created with mode 0600, which is usually not
    # what we want. If the original file already exists, just copy
    # its mode. Otherwise, manually obey umask.
    try:
        st_mode = os.lstat(name).st_mode & 0777
    except OSError, inst:
        if inst.errno != errno.ENOENT:
            raise
        st_mode = createmode
        if st_mode is None:
            st_mode = ~umask
        st_mode &= 0666
    os.chmod(temp, st_mode)
    if emptyok:
        return temp
    try:
        try:
            ifp = posixfile(name, "rb")
        except IOError, inst:
            if inst.errno == errno.ENOENT:
                return temp
            if not getattr(inst, 'filename', None):
                inst.filename = name
            raise
        ofp = posixfile(temp, "wb")
        for chunk in filechunkiter(ifp):
            ofp.write(chunk)
        ifp.close()
        ofp.close()
    except:
        try: os.unlink(temp)
        except: pass
        raise
    return temp

1016 class atomictempfile(posixfile):
1016 class atomictempfile(posixfile):
1017 """file-like object that atomically updates a file
1017 """file-like object that atomically updates a file
1018
1018
1019 All writes will be redirected to a temporary copy of the original
1019 All writes will be redirected to a temporary copy of the original
1020 file. When rename is called, the copy is renamed to the original
1020 file. When rename is called, the copy is renamed to the original
1021 name, making the changes visible.
1021 name, making the changes visible.
1022 """
1022 """
1023 def __init__(self, name, mode, createmode):
1023 def __init__(self, name, mode, createmode):
1024 self.__name = name
1024 self.__name = name
1025 self.temp = mktempcopy(name, emptyok=('w' in mode),
1025 self.temp = mktempcopy(name, emptyok=('w' in mode),
1026 createmode=createmode)
1026 createmode=createmode)
1027 posixfile.__init__(self, self.temp, mode)
1027 posixfile.__init__(self, self.temp, mode)
1028
1028
1029 def rename(self):
1029 def rename(self):
1030 if not self.closed:
1030 if not self.closed:
1031 posixfile.close(self)
1031 posixfile.close(self)
1032 rename(self.temp, localpath(self.__name))
1032 rename(self.temp, localpath(self.__name))
1033
1033
1034 def __del__(self):
1034 def __del__(self):
1035 if not self.closed:
1035 if not self.closed:
1036 try:
1036 try:
1037 os.unlink(self.temp)
1037 os.unlink(self.temp)
1038 except: pass
1038 except: pass
1039 posixfile.close(self)
1039 posixfile.close(self)
1040
1040
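A brief usage sketch of the two helpers above, assuming this revision is importable from a source checkout as mercurial.util; the temporary directory and file name are invented for the example. Writes land in a hidden temp copy and only replace the target when rename() is called:

    import os, tempfile
    from mercurial import util   # assumes a Mercurial source checkout on sys.path

    d = tempfile.mkdtemp()
    target = os.path.join(d, 'settings')          # hypothetical target file
    f = util.atomictempfile(target, 'w', None)    # backed by mktempcopy: '.settings-XXXXXX'
    f.write('key = value\n')
    f.rename()                                    # temp copy replaces the target
    print(open(target).read())                    # 'key = value'

If the process dies before rename(), __del__ removes the temp copy and the original file is left untouched.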
1041 def makedirs(name, mode=None):
1041 def makedirs(name, mode=None):
1042 """recursive directory creation with parent mode inheritance"""
1042 """recursive directory creation with parent mode inheritance"""
1043 try:
1043 try:
1044 os.mkdir(name)
1044 os.mkdir(name)
1045 if mode is not None:
1045 if mode is not None:
1046 os.chmod(name, mode)
1046 os.chmod(name, mode)
1047 return
1047 return
1048 except OSError, err:
1048 except OSError, err:
1049 if err.errno == errno.EEXIST:
1049 if err.errno == errno.EEXIST:
1050 return
1050 return
1051 if err.errno != errno.ENOENT:
1051 if err.errno != errno.ENOENT:
1052 raise
1052 raise
1053 parent = os.path.abspath(os.path.dirname(name))
1053 parent = os.path.abspath(os.path.dirname(name))
1054 makedirs(parent, mode)
1054 makedirs(parent, mode)
1055 makedirs(name, mode)
1055 makedirs(name, mode)
1056
1056
1057 class opener(object):
1057 class opener(object):
1058 """Open files relative to a base directory
1058 """Open files relative to a base directory
1059
1059
1060 This class is used to hide the details of COW semantics and
1060 This class is used to hide the details of COW semantics and
1061 remote file access from higher level code.
1061 remote file access from higher level code.
1062 """
1062 """
1063 def __init__(self, base, audit=True):
1063 def __init__(self, base, audit=True):
1064 self.base = base
1064 self.base = base
1065 if audit:
1065 if audit:
1066 self.audit_path = path_auditor(base)
1066 self.audit_path = path_auditor(base)
1067 else:
1067 else:
1068 self.audit_path = always
1068 self.audit_path = always
1069 self.createmode = None
1069 self.createmode = None
1070
1070
1071 def __getattr__(self, name):
1071 def __getattr__(self, name):
1072 if name == '_can_symlink':
1072 if name == '_can_symlink':
1073 self._can_symlink = checklink(self.base)
1073 self._can_symlink = checklink(self.base)
1074 return self._can_symlink
1074 return self._can_symlink
1075 raise AttributeError(name)
1075 raise AttributeError(name)
1076
1076
1077 def _fixfilemode(self, name):
1077 def _fixfilemode(self, name):
1078 if self.createmode is None:
1078 if self.createmode is None:
1079 return
1079 return
1080 os.chmod(name, self.createmode & 0666)
1080 os.chmod(name, self.createmode & 0666)
1081
1081
1082 def __call__(self, path, mode="r", text=False, atomictemp=False):
1082 def __call__(self, path, mode="r", text=False, atomictemp=False):
1083 self.audit_path(path)
1083 self.audit_path(path)
1084 f = os.path.join(self.base, path)
1084 f = os.path.join(self.base, path)
1085
1085
1086 if not text and "b" not in mode:
1086 if not text and "b" not in mode:
1087 mode += "b" # for that other OS
1087 mode += "b" # for that other OS
1088
1088
1089 nlink = -1
1089 nlink = -1
1090 if mode not in ("r", "rb"):
1090 if mode not in ("r", "rb"):
1091 try:
1091 try:
1092 nlink = nlinks(f)
1092 nlink = nlinks(f)
1093 except OSError:
1093 except OSError:
1094 nlink = 0
1094 nlink = 0
1095 d = os.path.dirname(f)
1095 d = os.path.dirname(f)
1096 if not os.path.isdir(d):
1096 if not os.path.isdir(d):
1097 makedirs(d, self.createmode)
1097 makedirs(d, self.createmode)
1098 if atomictemp:
1098 if atomictemp:
1099 return atomictempfile(f, mode, self.createmode)
1099 return atomictempfile(f, mode, self.createmode)
1100 if nlink > 1:
1100 if nlink > 1:
1101 rename(mktempcopy(f), f)
1101 rename(mktempcopy(f), f)
1102 fp = posixfile(f, mode)
1102 fp = posixfile(f, mode)
1103 if nlink == 0:
1103 if nlink == 0:
1104 self._fixfilemode(f)
1104 self._fixfilemode(f)
1105 return fp
1105 return fp
1106
1106
1107 def symlink(self, src, dst):
1107 def symlink(self, src, dst):
1108 self.audit_path(dst)
1108 self.audit_path(dst)
1109 linkname = os.path.join(self.base, dst)
1109 linkname = os.path.join(self.base, dst)
1110 try:
1110 try:
1111 os.unlink(linkname)
1111 os.unlink(linkname)
1112 except OSError:
1112 except OSError:
1113 pass
1113 pass
1114
1114
1115 dirname = os.path.dirname(linkname)
1115 dirname = os.path.dirname(linkname)
1116 if not os.path.exists(dirname):
1116 if not os.path.exists(dirname):
1117 makedirs(dirname, self.createmode)
1117 makedirs(dirname, self.createmode)
1118
1118
1119 if self._can_symlink:
1119 if self._can_symlink:
1120 try:
1120 try:
1121 os.symlink(src, linkname)
1121 os.symlink(src, linkname)
1122 except OSError, err:
1122 except OSError, err:
1123 raise OSError(err.errno, _('could not symlink to %r: %s') %
1123 raise OSError(err.errno, _('could not symlink to %r: %s') %
1124 (src, err.strerror), linkname)
1124 (src, err.strerror), linkname)
1125 else:
1125 else:
1126 f = self(dst, "w")
1126 f = self(dst, "w")
1127 f.write(src)
1127 f.write(src)
1128 f.close()
1128 f.close()
1129 self._fixfilemode(dst)
1129 self._fixfilemode(dst)
1130
1130
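To illustrate the copy-on-write behaviour the opener docstring mentions, here is a POSIX-only sketch: a hard link stands in for a cloned store, all paths are made up, and mercurial.util is assumed importable from a checkout.

    import os, tempfile
    from mercurial import util

    base = tempfile.mkdtemp()
    op = util.opener(base)                 # paths are audited relative to base
    f = op('store/data.txt', 'w')          # parent directories created on demand
    f.write('v1\n')
    f.close()
    os.link(os.path.join(base, 'store/data.txt'),
            os.path.join(base, 'store/copy.txt'))     # hardlinked "clone"
    f = op('store/data.txt', 'w')          # nlink > 1: the link is broken first
    f.write('v2\n')
    f.close()
    print(open(os.path.join(base, 'store/copy.txt')).read())   # still 'v1'

Passing atomictemp=True instead routes the write through the atomictempfile class shown earlier.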
1131 class chunkbuffer(object):
1131 class chunkbuffer(object):
1132 """Allow arbitrary sized chunks of data to be efficiently read from an
1132 """Allow arbitrary sized chunks of data to be efficiently read from an
1133 iterator over chunks of arbitrary size."""
1133 iterator over chunks of arbitrary size."""
1134
1134
1135 def __init__(self, in_iter):
1135 def __init__(self, in_iter):
1136 """in_iter is the iterator that's iterating over the input chunks.
1136 """in_iter is the iterator that's iterating over the input chunks.
1137 The internal buffer size targets 2**16 bytes."""
1137 The internal buffer size targets 2**16 bytes."""
1138 self.iter = iter(in_iter)
1138 self.iter = iter(in_iter)
1139 self.buf = ''
1139 self.buf = ''
1140 self.targetsize = 2**16
1140 self.targetsize = 2**16
1141
1141
1142 def read(self, l):
1142 def read(self, l):
1143 """Read L bytes of data from the iterator of chunks of data.
1143 """Read L bytes of data from the iterator of chunks of data.
1144 Returns less than L bytes if the iterator runs dry."""
1144 Returns less than L bytes if the iterator runs dry."""
1145 if l > len(self.buf) and self.iter:
1145 if l > len(self.buf) and self.iter:
1146 # Clamp to a multiple of self.targetsize
1146 # Clamp to a multiple of self.targetsize
1147 targetsize = max(l, self.targetsize)
1147 targetsize = max(l, self.targetsize)
1148 collector = cStringIO.StringIO()
1148 collector = cStringIO.StringIO()
1149 collector.write(self.buf)
1149 collector.write(self.buf)
1150 collected = len(self.buf)
1150 collected = len(self.buf)
1151 for chunk in self.iter:
1151 for chunk in self.iter:
1152 collector.write(chunk)
1152 collector.write(chunk)
1153 collected += len(chunk)
1153 collected += len(chunk)
1154 if collected >= targetsize:
1154 if collected >= targetsize:
1155 break
1155 break
1156 if collected < targetsize:
1156 if collected < targetsize:
1157 self.iter = False
1157 self.iter = False
1158 self.buf = collector.getvalue()
1158 self.buf = collector.getvalue()
1159 if len(self.buf) == l:
1159 if len(self.buf) == l:
1160 s, self.buf = str(self.buf), ''
1160 s, self.buf = str(self.buf), ''
1161 else:
1161 else:
1162 s, self.buf = self.buf[:l], buffer(self.buf, l)
1162 s, self.buf = self.buf[:l], buffer(self.buf, l)
1163 return s
1163 return s
1164
1164
1165 def filechunkiter(f, size=65536, limit=None):
1165 def filechunkiter(f, size=65536, limit=None):
1166 """Create a generator that produces the data in the file size
1166 """Create a generator that produces the data in the file size
1167 (default 65536) bytes at a time, up to optional limit (default is
1167 (default 65536) bytes at a time, up to optional limit (default is
1168 to read all data). Chunks may be less than size bytes if the
1168 to read all data). Chunks may be less than size bytes if the
1169 chunk is the last chunk in the file, or the file is a socket or
1169 chunk is the last chunk in the file, or the file is a socket or
1170 some other type of file that sometimes reads less data than is
1170 some other type of file that sometimes reads less data than is
1171 requested."""
1171 requested."""
1172 assert size >= 0
1172 assert size >= 0
1173 assert limit is None or limit >= 0
1173 assert limit is None or limit >= 0
1174 while True:
1174 while True:
1175 if limit is None: nbytes = size
1175 if limit is None: nbytes = size
1176 else: nbytes = min(limit, size)
1176 else: nbytes = min(limit, size)
1177 s = nbytes and f.read(nbytes)
1177 s = nbytes and f.read(nbytes)
1178 if not s: break
1178 if not s: break
1179 if limit: limit -= len(s)
1179 if limit: limit -= len(s)
1180 yield s
1180 yield s
1181
1181
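A small sketch of how chunkbuffer re-chunks whatever filechunkiter (or any other iterator) produces; it assumes the module above is importable as mercurial.util and, like the module itself, Python 2:

    from mercurial import util

    buf = util.chunkbuffer(iter(['abc', 'defg', 'hi']))
    print(repr(buf.read(4)))     # 'abcd'  -- input chunks are coalesced on demand
    print(repr(buf.read(100)))   # 'efghi' -- short read once the iterator runs dry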
1182 def makedate():
1182 def makedate():
1183 lt = time.localtime()
1183 lt = time.localtime()
1184 if lt[8] == 1 and time.daylight:
1184 if lt[8] == 1 and time.daylight:
1185 tz = time.altzone
1185 tz = time.altzone
1186 else:
1186 else:
1187 tz = time.timezone
1187 tz = time.timezone
1188 return time.mktime(lt), tz
1188 return time.mktime(lt), tz
1189
1189
1190 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1190 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
1191 """represent a (unixtime, offset) tuple as a localized time.
1191 """represent a (unixtime, offset) tuple as a localized time.
1192 unixtime is seconds since the epoch, and offset is the time zone's
1192 unixtime is seconds since the epoch, and offset is the time zone's
1193 number of seconds away from UTC. if timezone is false, do not
1193 number of seconds away from UTC. if timezone is false, do not
1194 append time zone to string."""
1194 append time zone to string."""
1195 t, tz = date or makedate()
1195 t, tz = date or makedate()
1196 if "%1" in format or "%2" in format:
1196 if "%1" in format or "%2" in format:
1197 sign = (tz > 0) and "-" or "+"
1197 sign = (tz > 0) and "-" or "+"
1198 minutes = abs(tz) / 60
1198 minutes = abs(tz) / 60
1199 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1199 format = format.replace("%1", "%c%02d" % (sign, minutes / 60))
1200 format = format.replace("%2", "%02d" % (minutes % 60))
1200 format = format.replace("%2", "%02d" % (minutes % 60))
1201 s = time.strftime(format, time.gmtime(float(t) - tz))
1201 s = time.strftime(format, time.gmtime(float(t) - tz))
1202 return s
1202 return s
1203
1203
1204 def shortdate(date=None):
1204 def shortdate(date=None):
1205 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1205 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1206 return datestr(date, format='%Y-%m-%d')
1206 return datestr(date, format='%Y-%m-%d')
1207
1207
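The "%1"/"%2" placeholders handled above are what produce the fractional-timezone strings checked later in this changeset's expected output; a hedged sketch, assuming mercurial.util from a checkout:

    from mercurial import util

    # 16200 seconds west of UTC renders as "-0430": %1 is the signed hours, %2 the minutes
    print(util.datestr((1000000000, 16200)))    # Sat Sep 08 21:16:40 2001 -0430
    print(util.datestr((1000000000, -16200), format='%Y-%m-%d %1%2'))   # 2001-09-09 +0430
    print(util.shortdate((1000000000, 0)))      # 2001-09-09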
1208 def strdate(string, format, defaults=[]):
1208 def strdate(string, format, defaults=[]):
1209 """parse a localized time string and return a (unixtime, offset) tuple.
1209 """parse a localized time string and return a (unixtime, offset) tuple.
1210 if the string cannot be parsed, ValueError is raised."""
1210 if the string cannot be parsed, ValueError is raised."""
1211 def timezone(string):
1211 def timezone(string):
1212 tz = string.split()[-1]
1212 tz = string.split()[-1]
1213 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1213 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1214 sign = (tz[0] == "+") and 1 or -1
1214 sign = (tz[0] == "+") and 1 or -1
1215 hours = int(tz[1:3])
1215 hours = int(tz[1:3])
1216 minutes = int(tz[3:5])
1216 minutes = int(tz[3:5])
1217 return -sign * (hours * 60 + minutes) * 60
1217 return -sign * (hours * 60 + minutes) * 60
1218 if tz == "GMT" or tz == "UTC":
1218 if tz == "GMT" or tz == "UTC":
1219 return 0
1219 return 0
1220 return None
1220 return None
1221
1221
1222 # NOTE: unixtime = localunixtime + offset
1222 # NOTE: unixtime = localunixtime + offset
1223 offset, date = timezone(string), string
1223 offset, date = timezone(string), string
1224 if offset is not None:
1224 if offset is not None:
1225 date = " ".join(string.split()[:-1])
1225 date = " ".join(string.split()[:-1])
1226
1226
1227 # add missing elements from defaults
1227 # add missing elements from defaults
1228 for part in defaults:
1228 for part in defaults:
1229 found = [True for p in part if ("%"+p) in format]
1229 found = [True for p in part if ("%"+p) in format]
1230 if not found:
1230 if not found:
1231 date += "@" + defaults[part]
1231 date += "@" + defaults[part]
1232 format += "@%" + part[0]
1232 format += "@%" + part[0]
1233
1233
1234 timetuple = time.strptime(date, format)
1234 timetuple = time.strptime(date, format)
1235 localunixtime = int(calendar.timegm(timetuple))
1235 localunixtime = int(calendar.timegm(timetuple))
1236 if offset is None:
1236 if offset is None:
1237 # local timezone
1237 # local timezone
1238 unixtime = int(time.mktime(timetuple))
1238 unixtime = int(time.mktime(timetuple))
1239 offset = unixtime - localunixtime
1239 offset = unixtime - localunixtime
1240 else:
1240 else:
1241 unixtime = localunixtime + offset
1241 unixtime = localunixtime + offset
1242 return unixtime, offset
1242 return unixtime, offset
1243
1243
1244 def parsedate(date, formats=None, defaults=None):
1244 def parsedate(date, formats=None, defaults=None):
1245 """parse a localized date/time string and return a (unixtime, offset) tuple.
1245 """parse a localized date/time string and return a (unixtime, offset) tuple.
1246
1246
1247 The date may be a "unixtime offset" string or in one of the specified
1247 The date may be a "unixtime offset" string or in one of the specified
1248 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1248 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1249 """
1249 """
1250 if not date:
1250 if not date:
1251 return 0, 0
1251 return 0, 0
1252 if isinstance(date, tuple) and len(date) == 2:
1252 if isinstance(date, tuple) and len(date) == 2:
1253 return date
1253 return date
1254 if not formats:
1254 if not formats:
1255 formats = defaultdateformats
1255 formats = defaultdateformats
1256 date = date.strip()
1256 date = date.strip()
1257 try:
1257 try:
1258 when, offset = map(int, date.split(' '))
1258 when, offset = map(int, date.split(' '))
1259 except ValueError:
1259 except ValueError:
1260 # fill out defaults
1260 # fill out defaults
1261 if not defaults:
1261 if not defaults:
1262 defaults = {}
1262 defaults = {}
1263 now = makedate()
1263 now = makedate()
1264 for part in "d mb yY HI M S".split():
1264 for part in "d mb yY HI M S".split():
1265 if part not in defaults:
1265 if part not in defaults:
1266 if part[0] in "HMS":
1266 if part[0] in "HMS":
1267 defaults[part] = "00"
1267 defaults[part] = "00"
1268 else:
1268 else:
1269 defaults[part] = datestr(now, "%" + part[0])
1269 defaults[part] = datestr(now, "%" + part[0])
1270
1270
1271 for format in formats:
1271 for format in formats:
1272 try:
1272 try:
1273 when, offset = strdate(date, format, defaults)
1273 when, offset = strdate(date, format, defaults)
1274 except (ValueError, OverflowError):
1274 except (ValueError, OverflowError):
1275 pass
1275 pass
1276 else:
1276 else:
1277 break
1277 break
1278 else:
1278 else:
1279 raise Abort(_('invalid date: %r ') % date)
1279 raise Abort(_('invalid date: %r ') % date)
1280 # validate explicit (probably user-specified) date and
1280 # validate explicit (probably user-specified) date and
1281 # time zone offset. values must fit in signed 32 bits for
1281 # time zone offset. values must fit in signed 32 bits for
1282 # current 32-bit linux runtimes. timezones go from UTC-12
1282 # current 32-bit linux runtimes. timezones go from UTC-12
1283 # to UTC+14
1283 # to UTC+14
1284 if abs(when) > 0x7fffffff:
1284 if abs(when) > 0x7fffffff:
1285 raise Abort(_('date exceeds 32 bits: %d') % when)
1285 raise Abort(_('date exceeds 32 bits: %d') % when)
1286 if offset < -50400 or offset > 43200:
1286 if offset < -50400 or offset > 43200:
1287 raise Abort(_('impossible time zone offset: %d') % offset)
1287 raise Abort(_('impossible time zone offset: %d') % offset)
1288 return when, offset
1288 return when, offset
1289
1289
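A minimal sketch of the input forms parsedate accepts, mirroring the dates used by the test script further down (mercurial.util assumed importable; the offset printed for the last form comes from the explicit zone in the string):

    from mercurial import util

    print(util.parsedate("1150000000 14400"))        # raw "unixtime offset" -> (1150000000, 14400)
    print(util.parsedate((1150000000, 14400)))       # (unixtime, offset) tuples pass straight through
    print(util.parsedate("2006-04-15 13:30 +0200"))  # named format, zone folded into the offset
    # Out-of-range input is rejected, e.g. "100000 1400000" aborts with
    # "impossible time zone offset: 1400000" (see the expected output below).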
1290 def matchdate(date):
1290 def matchdate(date):
1291 """Return a function that matches a given date match specifier
1291 """Return a function that matches a given date match specifier
1292
1292
1293 Formats include:
1293 Formats include:
1294
1294
1295 '{date}' match a given date to the accuracy provided
1295 '{date}' match a given date to the accuracy provided
1296
1296
1297 '<{date}' on or before a given date
1297 '<{date}' on or before a given date
1298
1298
1299 '>{date}' on or after a given date
1299 '>{date}' on or after a given date
1300
1300
1301 """
1301 """
1302
1302
1303 def lower(date):
1303 def lower(date):
1304 d = dict(mb="1", d="1")
1304 d = dict(mb="1", d="1")
1305 return parsedate(date, extendeddateformats, d)[0]
1305 return parsedate(date, extendeddateformats, d)[0]
1306
1306
1307 def upper(date):
1307 def upper(date):
1308 d = dict(mb="12", HI="23", M="59", S="59")
1308 d = dict(mb="12", HI="23", M="59", S="59")
1309 for days in "31 30 29".split():
1309 for days in "31 30 29".split():
1310 try:
1310 try:
1311 d["d"] = days
1311 d["d"] = days
1312 return parsedate(date, extendeddateformats, d)[0]
1312 return parsedate(date, extendeddateformats, d)[0]
1313 except:
1313 except:
1314 pass
1314 pass
1315 d["d"] = "28"
1315 d["d"] = "28"
1316 return parsedate(date, extendeddateformats, d)[0]
1316 return parsedate(date, extendeddateformats, d)[0]
1317
1317
1318 date = date.strip()
1318 if date[0] == "<":
1319 if date[0] == "<":
1319 when = upper(date[1:])
1320 when = upper(date[1:])
1320 return lambda x: x <= when
1321 return lambda x: x <= when
1321 elif date[0] == ">":
1322 elif date[0] == ">":
1322 when = lower(date[1:])
1323 when = lower(date[1:])
1323 return lambda x: x >= when
1324 return lambda x: x >= when
1324 elif date[0] == "-":
1325 elif date[0] == "-":
1325 try:
1326 try:
1326 days = int(date[1:])
1327 days = int(date[1:])
1327 except ValueError:
1328 except ValueError:
1328 raise Abort(_("invalid day spec: %s") % date[1:])
1329 raise Abort(_("invalid day spec: %s") % date[1:])
1329 when = makedate()[0] - days * 3600 * 24
1330 when = makedate()[0] - days * 3600 * 24
1330 return lambda x: x >= when
1331 return lambda x: x >= when
1331 elif " to " in date:
1332 elif " to " in date:
1332 a, b = date.split(" to ")
1333 a, b = date.split(" to ")
1333 start, stop = lower(a), upper(b)
1334 start, stop = lower(a), upper(b)
1334 return lambda x: x >= start and x <= stop
1335 return lambda x: x >= start and x <= stop
1335 else:
1336 else:
1336 start, stop = lower(date), upper(date)
1337 start, stop = lower(date), upper(date)
1337 return lambda x: x >= start and x <= stop
1338 return lambda x: x >= start and x <= stop
1338
1339
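The date.strip() added above is the whole fix exercised by this changeset: a spec such as ' > 02/01 ' previously fell through to the plain-range branch and aborted with "invalid date". A small sketch, assuming mercurial.util is importable from a checkout (the printed booleans depend on the current year used for defaults):

    from mercurial import util

    rev0 = util.parsedate('2006-02-01 13:00:30')[0]        # unixtime of the test's first commit
    for spec in ('>02/01', ' > 02/01 ', '<02/01', ' < 02/01 '):
        m = util.matchdate(spec)                           # no longer aborts on padded specs
        print("%r -> %r" % (spec, m(rev0)))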
1339 def shortuser(user):
1340 def shortuser(user):
1340 """Return a short representation of a user name or email address."""
1341 """Return a short representation of a user name or email address."""
1341 f = user.find('@')
1342 f = user.find('@')
1342 if f >= 0:
1343 if f >= 0:
1343 user = user[:f]
1344 user = user[:f]
1344 f = user.find('<')
1345 f = user.find('<')
1345 if f >= 0:
1346 if f >= 0:
1346 user = user[f+1:]
1347 user = user[f+1:]
1347 f = user.find(' ')
1348 f = user.find(' ')
1348 if f >= 0:
1349 if f >= 0:
1349 user = user[:f]
1350 user = user[:f]
1350 f = user.find('.')
1351 f = user.find('.')
1351 if f >= 0:
1352 if f >= 0:
1352 user = user[:f]
1353 user = user[:f]
1353 return user
1354 return user
1354
1355
1355 def email(author):
1356 def email(author):
1356 '''get email of author.'''
1357 '''get email of author.'''
1357 r = author.find('>')
1358 r = author.find('>')
1358 if r == -1: r = None
1359 if r == -1: r = None
1359 return author[author.find('<')+1:r]
1360 return author[author.find('<')+1:r]
1360
1361
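Quick sketch of the two author-string helpers above (the address is invented; mercurial.util assumed importable):

    from mercurial import util

    author = 'J. Random Hacker <jrh@example.com>'
    print(util.email(author))                   # jrh@example.com
    print(util.shortuser(author))               # jrh
    print(util.shortuser('jrh@example.com'))    # jrh -- bare addresses are trimmed too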
1361 def ellipsis(text, maxlength=400):
1362 def ellipsis(text, maxlength=400):
1362 """Trim string to at most maxlength (default: 400) characters."""
1363 """Trim string to at most maxlength (default: 400) characters."""
1363 if len(text) <= maxlength:
1364 if len(text) <= maxlength:
1364 return text
1365 return text
1365 else:
1366 else:
1366 return "%s..." % (text[:maxlength-3])
1367 return "%s..." % (text[:maxlength-3])
1367
1368
1368 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1369 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1369 '''yield every hg repository under path, recursively.'''
1370 '''yield every hg repository under path, recursively.'''
1370 def errhandler(err):
1371 def errhandler(err):
1371 if err.filename == path:
1372 if err.filename == path:
1372 raise err
1373 raise err
1373 if followsym and hasattr(os.path, 'samestat'):
1374 if followsym and hasattr(os.path, 'samestat'):
1374 def _add_dir_if_not_there(dirlst, dirname):
1375 def _add_dir_if_not_there(dirlst, dirname):
1375 match = False
1376 match = False
1376 samestat = os.path.samestat
1377 samestat = os.path.samestat
1377 dirstat = os.stat(dirname)
1378 dirstat = os.stat(dirname)
1378 for lstdirstat in dirlst:
1379 for lstdirstat in dirlst:
1379 if samestat(dirstat, lstdirstat):
1380 if samestat(dirstat, lstdirstat):
1380 match = True
1381 match = True
1381 break
1382 break
1382 if not match:
1383 if not match:
1383 dirlst.append(dirstat)
1384 dirlst.append(dirstat)
1384 return not match
1385 return not match
1385 else:
1386 else:
1386 followsym = False
1387 followsym = False
1387
1388
1388 if (seen_dirs is None) and followsym:
1389 if (seen_dirs is None) and followsym:
1389 seen_dirs = []
1390 seen_dirs = []
1390 _add_dir_if_not_there(seen_dirs, path)
1391 _add_dir_if_not_there(seen_dirs, path)
1391 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1392 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1392 if '.hg' in dirs:
1393 if '.hg' in dirs:
1393 yield root # found a repository
1394 yield root # found a repository
1394 qroot = os.path.join(root, '.hg', 'patches')
1395 qroot = os.path.join(root, '.hg', 'patches')
1395 if os.path.isdir(os.path.join(qroot, '.hg')):
1396 if os.path.isdir(os.path.join(qroot, '.hg')):
1396 yield qroot # we have a patch queue repo here
1397 yield qroot # we have a patch queue repo here
1397 if recurse:
1398 if recurse:
1398 # avoid recursing inside the .hg directory
1399 # avoid recursing inside the .hg directory
1399 dirs.remove('.hg')
1400 dirs.remove('.hg')
1400 else:
1401 else:
1401 dirs[:] = [] # don't descend further
1402 dirs[:] = [] # don't descend further
1402 elif followsym:
1403 elif followsym:
1403 newdirs = []
1404 newdirs = []
1404 for d in dirs:
1405 for d in dirs:
1405 fname = os.path.join(root, d)
1406 fname = os.path.join(root, d)
1406 if _add_dir_if_not_there(seen_dirs, fname):
1407 if _add_dir_if_not_there(seen_dirs, fname):
1407 if os.path.islink(fname):
1408 if os.path.islink(fname):
1408 for hgname in walkrepos(fname, True, seen_dirs):
1409 for hgname in walkrepos(fname, True, seen_dirs):
1409 yield hgname
1410 yield hgname
1410 else:
1411 else:
1411 newdirs.append(d)
1412 newdirs.append(d)
1412 dirs[:] = newdirs
1413 dirs[:] = newdirs
1413
1414
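A hedged sketch of walkrepos over a throwaway directory tree; it needs the hg executable on PATH and assumes mercurial.util is importable. The layout is hypothetical:

    import os, subprocess, tempfile
    from mercurial import util

    top = tempfile.mkdtemp()
    for sub in ('', 'nested/child'):
        d = os.path.join(top, sub)
        if not os.path.isdir(d):
            os.makedirs(d)
        subprocess.call(['hg', 'init', d])       # create two repositories
    for repo in util.walkrepos(top, recurse=True):
        print(repo)                              # top and top/nested/child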
1414 _rcpath = None
1415 _rcpath = None
1415
1416
1416 def os_rcpath():
1417 def os_rcpath():
1417 '''return default os-specific hgrc search path'''
1418 '''return default os-specific hgrc search path'''
1418 path = system_rcpath()
1419 path = system_rcpath()
1419 path.extend(user_rcpath())
1420 path.extend(user_rcpath())
1420 path = [os.path.normpath(f) for f in path]
1421 path = [os.path.normpath(f) for f in path]
1421 return path
1422 return path
1422
1423
1423 def rcpath():
1424 def rcpath():
1424 '''return hgrc search path. if env var HGRCPATH is set, use it.
1425 '''return hgrc search path. if env var HGRCPATH is set, use it.
1425 for each item in path, if directory, use files ending in .rc,
1426 for each item in path, if directory, use files ending in .rc,
1426 else use item.
1427 else use item.
1427 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1428 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1428 if no HGRCPATH, use default os-specific path.'''
1429 if no HGRCPATH, use default os-specific path.'''
1429 global _rcpath
1430 global _rcpath
1430 if _rcpath is None:
1431 if _rcpath is None:
1431 if 'HGRCPATH' in os.environ:
1432 if 'HGRCPATH' in os.environ:
1432 _rcpath = []
1433 _rcpath = []
1433 for p in os.environ['HGRCPATH'].split(os.pathsep):
1434 for p in os.environ['HGRCPATH'].split(os.pathsep):
1434 if not p: continue
1435 if not p: continue
1435 if os.path.isdir(p):
1436 if os.path.isdir(p):
1436 for f, kind in osutil.listdir(p):
1437 for f, kind in osutil.listdir(p):
1437 if f.endswith('.rc'):
1438 if f.endswith('.rc'):
1438 _rcpath.append(os.path.join(p, f))
1439 _rcpath.append(os.path.join(p, f))
1439 else:
1440 else:
1440 _rcpath.append(p)
1441 _rcpath.append(p)
1441 else:
1442 else:
1442 _rcpath = os_rcpath()
1443 _rcpath = os_rcpath()
1443 return _rcpath
1444 return _rcpath
1444
1445
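Sketch of how HGRCPATH is interpreted by rcpath(); note the result is cached in _rcpath, so the variable must be set before the first call (mercurial.util assumed importable):

    import os
    from mercurial import util

    os.environ['HGRCPATH'] = ''     # empty: only the repository's .hg/hgrc is read
    print(util.rcpath())            # -> []
    # With HGRCPATH unset, os_rcpath() supplies the platform defaults instead;
    # directory entries listed in HGRCPATH expand to the *.rc files they contain.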
1445 def bytecount(nbytes):
1446 def bytecount(nbytes):
1446 '''return byte count formatted as readable string, with units'''
1447 '''return byte count formatted as readable string, with units'''
1447
1448
1448 units = (
1449 units = (
1449 (100, 1<<30, _('%.0f GB')),
1450 (100, 1<<30, _('%.0f GB')),
1450 (10, 1<<30, _('%.1f GB')),
1451 (10, 1<<30, _('%.1f GB')),
1451 (1, 1<<30, _('%.2f GB')),
1452 (1, 1<<30, _('%.2f GB')),
1452 (100, 1<<20, _('%.0f MB')),
1453 (100, 1<<20, _('%.0f MB')),
1453 (10, 1<<20, _('%.1f MB')),
1454 (10, 1<<20, _('%.1f MB')),
1454 (1, 1<<20, _('%.2f MB')),
1455 (1, 1<<20, _('%.2f MB')),
1455 (100, 1<<10, _('%.0f KB')),
1456 (100, 1<<10, _('%.0f KB')),
1456 (10, 1<<10, _('%.1f KB')),
1457 (10, 1<<10, _('%.1f KB')),
1457 (1, 1<<10, _('%.2f KB')),
1458 (1, 1<<10, _('%.2f KB')),
1458 (1, 1, _('%.0f bytes')),
1459 (1, 1, _('%.0f bytes')),
1459 )
1460 )
1460
1461
1461 for multiplier, divisor, format in units:
1462 for multiplier, divisor, format in units:
1462 if nbytes >= divisor * multiplier:
1463 if nbytes >= divisor * multiplier:
1463 return format % (nbytes / float(divisor))
1464 return format % (nbytes / float(divisor))
1464 return units[-1][2] % nbytes
1465 return units[-1][2] % nbytes
1465
1466
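Sketch of bytecount's unit table: the first (multiplier, divisor) row that fits picks both the unit and the precision, giving three significant digits (mercurial.util assumed importable):

    from mercurial import util

    for n in (0, 100, 1024, 10 * 1024, 1 << 20, 2 * (1 << 30)):
        print("%12d  %s" % (n, util.bytecount(n)))
    # 0 bytes, 100 bytes, 1.00 KB, 10.0 KB, 1.00 MB, 2.00 GB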
1466 def drop_scheme(scheme, path):
1467 def drop_scheme(scheme, path):
1467 sc = scheme + ':'
1468 sc = scheme + ':'
1468 if path.startswith(sc):
1469 if path.startswith(sc):
1469 path = path[len(sc):]
1470 path = path[len(sc):]
1470 if path.startswith('//'):
1471 if path.startswith('//'):
1471 path = path[2:]
1472 path = path[2:]
1472 return path
1473 return path
1473
1474
1474 def uirepr(s):
1475 def uirepr(s):
1475 # Avoid double backslash in Windows path repr()
1476 # Avoid double backslash in Windows path repr()
1476 return repr(s).replace('\\\\', '\\')
1477 return repr(s).replace('\\\\', '\\')
1477
1478
1478 def termwidth():
1479 def termwidth():
1479 if 'COLUMNS' in os.environ:
1480 if 'COLUMNS' in os.environ:
1480 try:
1481 try:
1481 return int(os.environ['COLUMNS'])
1482 return int(os.environ['COLUMNS'])
1482 except ValueError:
1483 except ValueError:
1483 pass
1484 pass
1484 try:
1485 try:
1485 import termios, array, fcntl
1486 import termios, array, fcntl
1486 for dev in (sys.stdout, sys.stdin):
1487 for dev in (sys.stdout, sys.stdin):
1487 try:
1488 try:
1488 fd = dev.fileno()
1489 fd = dev.fileno()
1489 if not os.isatty(fd):
1490 if not os.isatty(fd):
1490 continue
1491 continue
1491 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1492 arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
1492 return array.array('h', arri)[1]
1493 return array.array('h', arri)[1]
1493 except ValueError:
1494 except ValueError:
1494 pass
1495 pass
1495 except ImportError:
1496 except ImportError:
1496 pass
1497 pass
1497 return 80
1498 return 80
1498
1499
1499 def iterlines(iterator):
1500 def iterlines(iterator):
1500 for chunk in iterator:
1501 for chunk in iterator:
1501 for line in chunk.splitlines():
1502 for line in chunk.splitlines():
1502 yield line
1503 yield line
@@ -1,45 +1,83
1 #!/bin/sh
1 #!/bin/sh
2
2
3 # This runs with TZ="GMT"
3 # This runs with TZ="GMT"
4 hg init
4 hg init
5 echo "test-parse-date" > a
5 echo "test-parse-date" > a
6 hg add a
6 hg add a
7 hg ci -d "2006-02-01 13:00:30" -m "rev 0"
7 hg ci -d "2006-02-01 13:00:30" -m "rev 0"
8 echo "hi!" >> a
8 echo "hi!" >> a
9 hg ci -d "2006-02-01 13:00:30 -0500" -m "rev 1"
9 hg ci -d "2006-02-01 13:00:30 -0500" -m "rev 1"
10 hg tag -d "2006-04-15 13:30" "Hi"
10 hg tag -d "2006-04-15 13:30" "Hi"
11 hg backout --merge -d "2006-04-15 13:30 +0200" -m "rev 3" 1
11 hg backout --merge -d "2006-04-15 13:30 +0200" -m "rev 3" 1
12 hg ci -d "1150000000 14400" -m "rev 4 (merge)"
12 hg ci -d "1150000000 14400" -m "rev 4 (merge)"
13 echo "fail" >> a
13 echo "fail" >> a
14 hg ci -d "should fail" -m "fail"
14 hg ci -d "should fail" -m "fail"
15 hg ci -d "100000000000000000 1400" -m "fail"
15 hg ci -d "100000000000000000 1400" -m "fail"
16 hg ci -d "100000 1400000" -m "fail"
16 hg ci -d "100000 1400000" -m "fail"
17
17
18 # Check with local timezone other than GMT and with DST
18 # Check with local timezone other than GMT and with DST
19 TZ="PST+8PDT"
19 TZ="PST+8PDT"
20 export TZ
20 export TZ
21 # PST=UTC-8 / PDT=UTC-7
21 # PST=UTC-8 / PDT=UTC-7
22 hg debugrebuildstate
22 hg debugrebuildstate
23 echo "a" > a
23 echo "a" > a
24 hg ci -d "2006-07-15 13:30" -m "summer@UTC-7"
24 hg ci -d "2006-07-15 13:30" -m "summer@UTC-7"
25 hg debugrebuildstate
25 hg debugrebuildstate
26 echo "b" > a
26 echo "b" > a
27 hg ci -d "2006-07-15 13:30 +0500" -m "summer@UTC+5"
27 hg ci -d "2006-07-15 13:30 +0500" -m "summer@UTC+5"
28 hg debugrebuildstate
28 hg debugrebuildstate
29 echo "c" > a
29 echo "c" > a
30 hg ci -d "2006-01-15 13:30" -m "winter@UTC-8"
30 hg ci -d "2006-01-15 13:30" -m "winter@UTC-8"
31 hg debugrebuildstate
31 hg debugrebuildstate
32 echo "d" > a
32 echo "d" > a
33 hg ci -d "2006-01-15 13:30 +0500" -m "winter@UTC+5"
33 hg ci -d "2006-01-15 13:30 +0500" -m "winter@UTC+5"
34 hg log --template '{date|date}\n'
34 hg log --template '{date|date}\n'
35
35
36 # Test issue1014 (fractional timezones)
36 # Test issue1014 (fractional timezones)
37 hg debugdate "1000000000 -16200" # 0430
37 hg debugdate "1000000000 -16200" # 0430
38 hg debugdate "1000000000 -15300" # 0415
38 hg debugdate "1000000000 -15300" # 0415
39 hg debugdate "1000000000 -14400" # 0400
39 hg debugdate "1000000000 -14400" # 0400
40 hg debugdate "1000000000 0" # GMT
40 hg debugdate "1000000000 0" # GMT
41 hg debugdate "1000000000 14400" # -0400
41 hg debugdate "1000000000 14400" # -0400
42 hg debugdate "1000000000 15300" # -0415
42 hg debugdate "1000000000 15300" # -0415
43 hg debugdate "1000000000 16200" # -0430
43 hg debugdate "1000000000 16200" # -0430
44 hg debugdate "Sat Sep 08 21:16:40 2001 +0430"
44 hg debugdate "Sat Sep 08 21:16:40 2001 +0430"
45 hg debugdate "Sat Sep 08 21:16:40 2001 -0430"
45 hg debugdate "Sat Sep 08 21:16:40 2001 -0430"
46
47 # Test date formats with '>' or '<' accompanied by space characters
48 hg log -d '>' --template '{date|date}\n'
49 hg log -d '<' --template '{date|date}\n'
50
51 hg log -d ' >' --template '{date|date}\n'
52 hg log -d ' <' --template '{date|date}\n'
53
54 hg log -d '> ' --template '{date|date}\n'
55 hg log -d '< ' --template '{date|date}\n'
56
57 hg log -d ' > ' --template '{date|date}\n'
58 hg log -d ' < ' --template '{date|date}\n'
59
60
61 hg log -d '>02/01' --template '{date|date}\n'
62 hg log -d '<02/01' --template '{date|date}\n'
63
64 hg log -d ' >02/01' --template '{date|date}\n'
65 hg log -d ' <02/01' --template '{date|date}\n'
66
67 hg log -d '> 02/01' --template '{date|date}\n'
68 hg log -d '< 02/01' --template '{date|date}\n'
69
70 hg log -d ' > 02/01' --template '{date|date}\n'
71 hg log -d ' < 02/01' --template '{date|date}\n'
72
73 hg log -d '>02/01 ' --template '{date|date}\n'
74 hg log -d '<02/01 ' --template '{date|date}\n'
75
76 hg log -d ' >02/01 ' --template '{date|date}\n'
77 hg log -d ' <02/01 ' --template '{date|date}\n'
78
79 hg log -d '> 02/01 ' --template '{date|date}\n'
80 hg log -d '< 02/01 ' --template '{date|date}\n'
81
82 hg log -d ' > 02/01 ' --template '{date|date}\n'
83 hg log -d ' < 02/01 ' --template '{date|date}\n'
\ No newline at end of file
@@ -1,36 +1,144
1 reverting a
1 reverting a
2 created new head
2 created new head
3 changeset 3:107ce1ee2b43 backs out changeset 1:25a1420a55f8
3 changeset 3:107ce1ee2b43 backs out changeset 1:25a1420a55f8
4 merging with changeset 3:107ce1ee2b43
4 merging with changeset 3:107ce1ee2b43
5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
5 1 files updated, 0 files merged, 0 files removed, 0 files unresolved
6 (branch merge, don't forget to commit)
6 (branch merge, don't forget to commit)
7 abort: invalid date: 'should fail'
7 abort: invalid date: 'should fail'
8 abort: date exceeds 32 bits: 100000000000000000
8 abort: date exceeds 32 bits: 100000000000000000
9 abort: impossible time zone offset: 1400000
9 abort: impossible time zone offset: 1400000
10 Sun Jan 15 13:30:00 2006 +0500
10 Sun Jan 15 13:30:00 2006 +0500
11 Sun Jan 15 13:30:00 2006 -0800
11 Sun Jan 15 13:30:00 2006 -0800
12 Sat Jul 15 13:30:00 2006 +0500
12 Sat Jul 15 13:30:00 2006 +0500
13 Sat Jul 15 13:30:00 2006 -0700
13 Sat Jul 15 13:30:00 2006 -0700
14 Sun Jun 11 00:26:40 2006 -0400
14 Sun Jun 11 00:26:40 2006 -0400
15 Sat Apr 15 13:30:00 2006 +0200
15 Sat Apr 15 13:30:00 2006 +0200
16 Sat Apr 15 13:30:00 2006 +0000
16 Sat Apr 15 13:30:00 2006 +0000
17 Wed Feb 01 13:00:30 2006 -0500
17 Wed Feb 01 13:00:30 2006 -0500
18 Wed Feb 01 13:00:30 2006 +0000
18 Wed Feb 01 13:00:30 2006 +0000
19 internal: 1000000000 -16200
19 internal: 1000000000 -16200
20 standard: Sun Sep 09 06:16:40 2001 +0430
20 standard: Sun Sep 09 06:16:40 2001 +0430
21 internal: 1000000000 -15300
21 internal: 1000000000 -15300
22 standard: Sun Sep 09 06:01:40 2001 +0415
22 standard: Sun Sep 09 06:01:40 2001 +0415
23 internal: 1000000000 -14400
23 internal: 1000000000 -14400
24 standard: Sun Sep 09 05:46:40 2001 +0400
24 standard: Sun Sep 09 05:46:40 2001 +0400
25 internal: 1000000000 0
25 internal: 1000000000 0
26 standard: Sun Sep 09 01:46:40 2001 +0000
26 standard: Sun Sep 09 01:46:40 2001 +0000
27 internal: 1000000000 14400
27 internal: 1000000000 14400
28 standard: Sat Sep 08 21:46:40 2001 -0400
28 standard: Sat Sep 08 21:46:40 2001 -0400
29 internal: 1000000000 15300
29 internal: 1000000000 15300
30 standard: Sat Sep 08 21:31:40 2001 -0415
30 standard: Sat Sep 08 21:31:40 2001 -0415
31 internal: 1000000000 16200
31 internal: 1000000000 16200
32 standard: Sat Sep 08 21:16:40 2001 -0430
32 standard: Sat Sep 08 21:16:40 2001 -0430
33 internal: 999967600 -16200
33 internal: 999967600 -16200
34 standard: Sat Sep 08 21:16:40 2001 +0430
34 standard: Sat Sep 08 21:16:40 2001 +0430
35 internal: 1000000000 16200
35 internal: 1000000000 16200
36 standard: Sat Sep 08 21:16:40 2001 -0430
36 standard: Sat Sep 08 21:16:40 2001 -0430
37 Sun Jan 15 13:30:00 2006 +0500
38 Sun Jan 15 13:30:00 2006 -0800
39 Sat Jul 15 13:30:00 2006 +0500
40 Sat Jul 15 13:30:00 2006 -0700
41 Sun Jun 11 00:26:40 2006 -0400
42 Sat Apr 15 13:30:00 2006 +0200
43 Sat Apr 15 13:30:00 2006 +0000
44 Wed Feb 01 13:00:30 2006 -0500
45 Wed Feb 01 13:00:30 2006 +0000
46 Sun Jan 15 13:30:00 2006 +0500
47 Sun Jan 15 13:30:00 2006 -0800
48 Sat Jul 15 13:30:00 2006 +0500
49 Sat Jul 15 13:30:00 2006 -0700
50 Sun Jun 11 00:26:40 2006 -0400
51 Sat Apr 15 13:30:00 2006 +0200
52 Sat Apr 15 13:30:00 2006 +0000
53 Wed Feb 01 13:00:30 2006 -0500
54 Wed Feb 01 13:00:30 2006 +0000
55 Sun Jan 15 13:30:00 2006 +0500
56 Sun Jan 15 13:30:00 2006 -0800
57 Sat Jul 15 13:30:00 2006 +0500
58 Sat Jul 15 13:30:00 2006 -0700
59 Sun Jun 11 00:26:40 2006 -0400
60 Sat Apr 15 13:30:00 2006 +0200
61 Sat Apr 15 13:30:00 2006 +0000
62 Wed Feb 01 13:00:30 2006 -0500
63 Wed Feb 01 13:00:30 2006 +0000
64 Sun Jan 15 13:30:00 2006 +0500
65 Sun Jan 15 13:30:00 2006 -0800
66 Sat Jul 15 13:30:00 2006 +0500
67 Sat Jul 15 13:30:00 2006 -0700
68 Sun Jun 11 00:26:40 2006 -0400
69 Sat Apr 15 13:30:00 2006 +0200
70 Sat Apr 15 13:30:00 2006 +0000
71 Wed Feb 01 13:00:30 2006 -0500
72 Wed Feb 01 13:00:30 2006 +0000
73 Sun Jan 15 13:30:00 2006 +0500
74 Sun Jan 15 13:30:00 2006 -0800
75 Sat Jul 15 13:30:00 2006 +0500
76 Sat Jul 15 13:30:00 2006 -0700
77 Sun Jun 11 00:26:40 2006 -0400
78 Sat Apr 15 13:30:00 2006 +0200
79 Sat Apr 15 13:30:00 2006 +0000
80 Wed Feb 01 13:00:30 2006 -0500
81 Wed Feb 01 13:00:30 2006 +0000
82 Sun Jan 15 13:30:00 2006 +0500
83 Sun Jan 15 13:30:00 2006 -0800
84 Sat Jul 15 13:30:00 2006 +0500
85 Sat Jul 15 13:30:00 2006 -0700
86 Sun Jun 11 00:26:40 2006 -0400
87 Sat Apr 15 13:30:00 2006 +0200
88 Sat Apr 15 13:30:00 2006 +0000
89 Wed Feb 01 13:00:30 2006 -0500
90 Wed Feb 01 13:00:30 2006 +0000
91 Sun Jan 15 13:30:00 2006 +0500
92 Sun Jan 15 13:30:00 2006 -0800
93 Sat Jul 15 13:30:00 2006 +0500
94 Sat Jul 15 13:30:00 2006 -0700
95 Sun Jun 11 00:26:40 2006 -0400
96 Sat Apr 15 13:30:00 2006 +0200
97 Sat Apr 15 13:30:00 2006 +0000
98 Wed Feb 01 13:00:30 2006 -0500
99 Wed Feb 01 13:00:30 2006 +0000
100 Sun Jan 15 13:30:00 2006 +0500
101 Sun Jan 15 13:30:00 2006 -0800
102 Sat Jul 15 13:30:00 2006 +0500
103 Sat Jul 15 13:30:00 2006 -0700
104 Sun Jun 11 00:26:40 2006 -0400
105 Sat Apr 15 13:30:00 2006 +0200
106 Sat Apr 15 13:30:00 2006 +0000
107 Wed Feb 01 13:00:30 2006 -0500
108 Wed Feb 01 13:00:30 2006 +0000
109 Sun Jan 15 13:30:00 2006 +0500
110 Sun Jan 15 13:30:00 2006 -0800
111 Sat Jul 15 13:30:00 2006 +0500
112 Sat Jul 15 13:30:00 2006 -0700
113 Sun Jun 11 00:26:40 2006 -0400
114 Sat Apr 15 13:30:00 2006 +0200
115 Sat Apr 15 13:30:00 2006 +0000
116 Wed Feb 01 13:00:30 2006 -0500
117 Wed Feb 01 13:00:30 2006 +0000
118 Sun Jan 15 13:30:00 2006 +0500
119 Sun Jan 15 13:30:00 2006 -0800
120 Sat Jul 15 13:30:00 2006 +0500
121 Sat Jul 15 13:30:00 2006 -0700
122 Sun Jun 11 00:26:40 2006 -0400
123 Sat Apr 15 13:30:00 2006 +0200
124 Sat Apr 15 13:30:00 2006 +0000
125 Wed Feb 01 13:00:30 2006 -0500
126 Wed Feb 01 13:00:30 2006 +0000
127 Sun Jan 15 13:30:00 2006 +0500
128 Sun Jan 15 13:30:00 2006 -0800
129 Sat Jul 15 13:30:00 2006 +0500
130 Sat Jul 15 13:30:00 2006 -0700
131 Sun Jun 11 00:26:40 2006 -0400
132 Sat Apr 15 13:30:00 2006 +0200
133 Sat Apr 15 13:30:00 2006 +0000
134 Wed Feb 01 13:00:30 2006 -0500
135 Wed Feb 01 13:00:30 2006 +0000
136 Sun Jan 15 13:30:00 2006 +0500
137 Sun Jan 15 13:30:00 2006 -0800
138 Sat Jul 15 13:30:00 2006 +0500
139 Sat Jul 15 13:30:00 2006 -0700
140 Sun Jun 11 00:26:40 2006 -0400
141 Sat Apr 15 13:30:00 2006 +0200
142 Sat Apr 15 13:30:00 2006 +0000
143 Wed Feb 01 13:00:30 2006 -0500
144 Wed Feb 01 13:00:30 2006 +0000