##// END OF EJS Templates
util: add an interpolate() function to for replacing multiple values...
Steve Losh -
r11988:8380ed69 default
parent child Browse files
Show More
@@ -1,259 +1,259 b''
1 # filemerge.py - file-level merge handling for Mercurial
1 # filemerge.py - file-level merge handling for Mercurial
2 #
2 #
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
3 # Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import short
8 from node import short
9 from i18n import _
9 from i18n import _
10 import util, simplemerge, match, error
10 import util, simplemerge, match, error
11 import os, tempfile, re, filecmp
11 import os, tempfile, re, filecmp
12
12
13 def _toolstr(ui, tool, part, default=""):
13 def _toolstr(ui, tool, part, default=""):
14 return ui.config("merge-tools", tool + "." + part, default)
14 return ui.config("merge-tools", tool + "." + part, default)
15
15
16 def _toolbool(ui, tool, part, default=False):
16 def _toolbool(ui, tool, part, default=False):
17 return ui.configbool("merge-tools", tool + "." + part, default)
17 return ui.configbool("merge-tools", tool + "." + part, default)
18
18
19 def _toollist(ui, tool, part, default=[]):
19 def _toollist(ui, tool, part, default=[]):
20 return ui.configlist("merge-tools", tool + "." + part, default)
20 return ui.configlist("merge-tools", tool + "." + part, default)
21
21
22 _internal = ['internal:' + s
22 _internal = ['internal:' + s
23 for s in 'fail local other merge prompt dump'.split()]
23 for s in 'fail local other merge prompt dump'.split()]
24
24
25 def _findtool(ui, tool):
25 def _findtool(ui, tool):
26 if tool in _internal:
26 if tool in _internal:
27 return tool
27 return tool
28 k = _toolstr(ui, tool, "regkey")
28 k = _toolstr(ui, tool, "regkey")
29 if k:
29 if k:
30 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
30 p = util.lookup_reg(k, _toolstr(ui, tool, "regname"))
31 if p:
31 if p:
32 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
32 p = util.find_exe(p + _toolstr(ui, tool, "regappend"))
33 if p:
33 if p:
34 return p
34 return p
35 return util.find_exe(_toolstr(ui, tool, "executable", tool))
35 return util.find_exe(_toolstr(ui, tool, "executable", tool))
36
36
37 def _picktool(repo, ui, path, binary, symlink):
37 def _picktool(repo, ui, path, binary, symlink):
38 def check(tool, pat, symlink, binary):
38 def check(tool, pat, symlink, binary):
39 tmsg = tool
39 tmsg = tool
40 if pat:
40 if pat:
41 tmsg += " specified for " + pat
41 tmsg += " specified for " + pat
42 if not _findtool(ui, tool):
42 if not _findtool(ui, tool):
43 if pat: # explicitly requested tool deserves a warning
43 if pat: # explicitly requested tool deserves a warning
44 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
44 ui.warn(_("couldn't find merge tool %s\n") % tmsg)
45 else: # configured but non-existing tools are more silent
45 else: # configured but non-existing tools are more silent
46 ui.note(_("couldn't find merge tool %s\n") % tmsg)
46 ui.note(_("couldn't find merge tool %s\n") % tmsg)
47 elif symlink and not _toolbool(ui, tool, "symlink"):
47 elif symlink and not _toolbool(ui, tool, "symlink"):
48 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
48 ui.warn(_("tool %s can't handle symlinks\n") % tmsg)
49 elif binary and not _toolbool(ui, tool, "binary"):
49 elif binary and not _toolbool(ui, tool, "binary"):
50 ui.warn(_("tool %s can't handle binary\n") % tmsg)
50 ui.warn(_("tool %s can't handle binary\n") % tmsg)
51 elif not util.gui() and _toolbool(ui, tool, "gui"):
51 elif not util.gui() and _toolbool(ui, tool, "gui"):
52 ui.warn(_("tool %s requires a GUI\n") % tmsg)
52 ui.warn(_("tool %s requires a GUI\n") % tmsg)
53 else:
53 else:
54 return True
54 return True
55 return False
55 return False
56
56
57 # HGMERGE takes precedence
57 # HGMERGE takes precedence
58 hgmerge = os.environ.get("HGMERGE")
58 hgmerge = os.environ.get("HGMERGE")
59 if hgmerge:
59 if hgmerge:
60 return (hgmerge, hgmerge)
60 return (hgmerge, hgmerge)
61
61
62 # then patterns
62 # then patterns
63 for pat, tool in ui.configitems("merge-patterns"):
63 for pat, tool in ui.configitems("merge-patterns"):
64 mf = match.match(repo.root, '', [pat])
64 mf = match.match(repo.root, '', [pat])
65 if mf(path) and check(tool, pat, symlink, False):
65 if mf(path) and check(tool, pat, symlink, False):
66 toolpath = _findtool(ui, tool)
66 toolpath = _findtool(ui, tool)
67 return (tool, '"' + toolpath + '"')
67 return (tool, '"' + toolpath + '"')
68
68
69 # then merge tools
69 # then merge tools
70 tools = {}
70 tools = {}
71 for k, v in ui.configitems("merge-tools"):
71 for k, v in ui.configitems("merge-tools"):
72 t = k.split('.')[0]
72 t = k.split('.')[0]
73 if t not in tools:
73 if t not in tools:
74 tools[t] = int(_toolstr(ui, t, "priority", "0"))
74 tools[t] = int(_toolstr(ui, t, "priority", "0"))
75 names = tools.keys()
75 names = tools.keys()
76 tools = sorted([(-p, t) for t, p in tools.items()])
76 tools = sorted([(-p, t) for t, p in tools.items()])
77 uimerge = ui.config("ui", "merge")
77 uimerge = ui.config("ui", "merge")
78 if uimerge:
78 if uimerge:
79 if uimerge not in names:
79 if uimerge not in names:
80 return (uimerge, uimerge)
80 return (uimerge, uimerge)
81 tools.insert(0, (None, uimerge)) # highest priority
81 tools.insert(0, (None, uimerge)) # highest priority
82 tools.append((None, "hgmerge")) # the old default, if found
82 tools.append((None, "hgmerge")) # the old default, if found
83 for p, t in tools:
83 for p, t in tools:
84 if check(t, None, symlink, binary):
84 if check(t, None, symlink, binary):
85 toolpath = _findtool(ui, t)
85 toolpath = _findtool(ui, t)
86 return (t, '"' + toolpath + '"')
86 return (t, '"' + toolpath + '"')
87 # internal merge as last resort
87 # internal merge as last resort
88 return (not (symlink or binary) and "internal:merge" or None, None)
88 return (not (symlink or binary) and "internal:merge" or None, None)
89
89
90 def _eoltype(data):
90 def _eoltype(data):
91 "Guess the EOL type of a file"
91 "Guess the EOL type of a file"
92 if '\0' in data: # binary
92 if '\0' in data: # binary
93 return None
93 return None
94 if '\r\n' in data: # Windows
94 if '\r\n' in data: # Windows
95 return '\r\n'
95 return '\r\n'
96 if '\r' in data: # Old Mac
96 if '\r' in data: # Old Mac
97 return '\r'
97 return '\r'
98 if '\n' in data: # UNIX
98 if '\n' in data: # UNIX
99 return '\n'
99 return '\n'
100 return None # unknown
100 return None # unknown
101
101
102 def _matcheol(file, origfile):
102 def _matcheol(file, origfile):
103 "Convert EOL markers in a file to match origfile"
103 "Convert EOL markers in a file to match origfile"
104 tostyle = _eoltype(open(origfile, "rb").read())
104 tostyle = _eoltype(open(origfile, "rb").read())
105 if tostyle:
105 if tostyle:
106 data = open(file, "rb").read()
106 data = open(file, "rb").read()
107 style = _eoltype(data)
107 style = _eoltype(data)
108 if style:
108 if style:
109 newdata = data.replace(style, tostyle)
109 newdata = data.replace(style, tostyle)
110 if newdata != data:
110 if newdata != data:
111 open(file, "wb").write(newdata)
111 open(file, "wb").write(newdata)
112
112
113 def filemerge(repo, mynode, orig, fcd, fco, fca):
113 def filemerge(repo, mynode, orig, fcd, fco, fca):
114 """perform a 3-way merge in the working directory
114 """perform a 3-way merge in the working directory
115
115
116 mynode = parent node before merge
116 mynode = parent node before merge
117 orig = original local filename before merge
117 orig = original local filename before merge
118 fco = other file context
118 fco = other file context
119 fca = ancestor file context
119 fca = ancestor file context
120 fcd = local file context for current/destination file
120 fcd = local file context for current/destination file
121 """
121 """
122
122
123 def temp(prefix, ctx):
123 def temp(prefix, ctx):
124 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
124 pre = "%s~%s." % (os.path.basename(ctx.path()), prefix)
125 (fd, name) = tempfile.mkstemp(prefix=pre)
125 (fd, name) = tempfile.mkstemp(prefix=pre)
126 data = repo.wwritedata(ctx.path(), ctx.data())
126 data = repo.wwritedata(ctx.path(), ctx.data())
127 f = os.fdopen(fd, "wb")
127 f = os.fdopen(fd, "wb")
128 f.write(data)
128 f.write(data)
129 f.close()
129 f.close()
130 return name
130 return name
131
131
132 def isbin(ctx):
132 def isbin(ctx):
133 try:
133 try:
134 return util.binary(ctx.data())
134 return util.binary(ctx.data())
135 except IOError:
135 except IOError:
136 return False
136 return False
137
137
138 if not fco.cmp(fcd): # files identical?
138 if not fco.cmp(fcd): # files identical?
139 return None
139 return None
140
140
141 if fca == fco: # backwards, use working dir parent as ancestor
141 if fca == fco: # backwards, use working dir parent as ancestor
142 fca = fcd.parents()[0]
142 fca = fcd.parents()[0]
143
143
144 ui = repo.ui
144 ui = repo.ui
145 fd = fcd.path()
145 fd = fcd.path()
146 binary = isbin(fcd) or isbin(fco) or isbin(fca)
146 binary = isbin(fcd) or isbin(fco) or isbin(fca)
147 symlink = 'l' in fcd.flags() + fco.flags()
147 symlink = 'l' in fcd.flags() + fco.flags()
148 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
148 tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
149 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
149 ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
150 (tool, fd, binary, symlink))
150 (tool, fd, binary, symlink))
151
151
152 if not tool or tool == 'internal:prompt':
152 if not tool or tool == 'internal:prompt':
153 tool = "internal:local"
153 tool = "internal:local"
154 if ui.promptchoice(_(" no tool found to merge %s\n"
154 if ui.promptchoice(_(" no tool found to merge %s\n"
155 "keep (l)ocal or take (o)ther?") % fd,
155 "keep (l)ocal or take (o)ther?") % fd,
156 (_("&Local"), _("&Other")), 0):
156 (_("&Local"), _("&Other")), 0):
157 tool = "internal:other"
157 tool = "internal:other"
158 if tool == "internal:local":
158 if tool == "internal:local":
159 return 0
159 return 0
160 if tool == "internal:other":
160 if tool == "internal:other":
161 repo.wwrite(fd, fco.data(), fco.flags())
161 repo.wwrite(fd, fco.data(), fco.flags())
162 return 0
162 return 0
163 if tool == "internal:fail":
163 if tool == "internal:fail":
164 return 1
164 return 1
165
165
166 # do the actual merge
166 # do the actual merge
167 a = repo.wjoin(fd)
167 a = repo.wjoin(fd)
168 b = temp("base", fca)
168 b = temp("base", fca)
169 c = temp("other", fco)
169 c = temp("other", fco)
170 out = ""
170 out = ""
171 back = a + ".orig"
171 back = a + ".orig"
172 util.copyfile(a, back)
172 util.copyfile(a, back)
173
173
174 if orig != fco.path():
174 if orig != fco.path():
175 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
175 ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd))
176 else:
176 else:
177 ui.status(_("merging %s\n") % fd)
177 ui.status(_("merging %s\n") % fd)
178
178
179 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
179 ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
180
180
181 # do we attempt to simplemerge first?
181 # do we attempt to simplemerge first?
182 try:
182 try:
183 premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
183 premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
184 except error.ConfigError:
184 except error.ConfigError:
185 premerge = _toolstr(ui, tool, "premerge").lower()
185 premerge = _toolstr(ui, tool, "premerge").lower()
186 valid = 'keep'.split()
186 valid = 'keep'.split()
187 if premerge not in valid:
187 if premerge not in valid:
188 _valid = ', '.join(["'" + v + "'" for v in valid])
188 _valid = ', '.join(["'" + v + "'" for v in valid])
189 raise error.ConfigError(_("%s.premerge not valid "
189 raise error.ConfigError(_("%s.premerge not valid "
190 "('%s' is neither boolean nor %s)") %
190 "('%s' is neither boolean nor %s)") %
191 (tool, premerge, _valid))
191 (tool, premerge, _valid))
192
192
193 if premerge:
193 if premerge:
194 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
194 r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
195 if not r:
195 if not r:
196 ui.debug(" premerge successful\n")
196 ui.debug(" premerge successful\n")
197 os.unlink(back)
197 os.unlink(back)
198 os.unlink(b)
198 os.unlink(b)
199 os.unlink(c)
199 os.unlink(c)
200 return 0
200 return 0
201 if premerge != 'keep':
201 if premerge != 'keep':
202 util.copyfile(back, a) # restore from backup and try again
202 util.copyfile(back, a) # restore from backup and try again
203
203
204 env = dict(HG_FILE=fd,
204 env = dict(HG_FILE=fd,
205 HG_MY_NODE=short(mynode),
205 HG_MY_NODE=short(mynode),
206 HG_OTHER_NODE=str(fco.changectx()),
206 HG_OTHER_NODE=str(fco.changectx()),
207 HG_BASE_NODE=str(fca.changectx()),
207 HG_BASE_NODE=str(fca.changectx()),
208 HG_MY_ISLINK='l' in fcd.flags(),
208 HG_MY_ISLINK='l' in fcd.flags(),
209 HG_OTHER_ISLINK='l' in fco.flags(),
209 HG_OTHER_ISLINK='l' in fco.flags(),
210 HG_BASE_ISLINK='l' in fca.flags())
210 HG_BASE_ISLINK='l' in fca.flags())
211
211
212 if tool == "internal:merge":
212 if tool == "internal:merge":
213 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
213 r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
214 elif tool == 'internal:dump':
214 elif tool == 'internal:dump':
215 a = repo.wjoin(fd)
215 a = repo.wjoin(fd)
216 util.copyfile(a, a + ".local")
216 util.copyfile(a, a + ".local")
217 repo.wwrite(fd + ".other", fco.data(), fco.flags())
217 repo.wwrite(fd + ".other", fco.data(), fco.flags())
218 repo.wwrite(fd + ".base", fca.data(), fca.flags())
218 repo.wwrite(fd + ".base", fca.data(), fca.flags())
219 return 1 # unresolved
219 return 1 # unresolved
220 else:
220 else:
221 args = _toolstr(ui, tool, "args", '$local $base $other')
221 args = _toolstr(ui, tool, "args", '$local $base $other')
222 if "$output" in args:
222 if "$output" in args:
223 out, a = a, back # read input from backup, write to original
223 out, a = a, back # read input from backup, write to original
224 replace = dict(local=a, base=b, other=c, output=out)
224 replace = dict(local=a, base=b, other=c, output=out)
225 args = re.sub("\$(local|base|other|output)",
225 args = util.interpolate(r'\$', replace, args,
226 lambda x: '"%s"' % util.localpath(replace[x.group()[1:]]), args)
226 lambda s: '"%s"' % util.localpath(s))
227 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
227 r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env)
228
228
229 if not r and (_toolbool(ui, tool, "checkconflicts") or
229 if not r and (_toolbool(ui, tool, "checkconflicts") or
230 'conflicts' in _toollist(ui, tool, "check")):
230 'conflicts' in _toollist(ui, tool, "check")):
231 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
231 if re.match("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data()):
232 r = 1
232 r = 1
233
233
234 checked = False
234 checked = False
235 if 'prompt' in _toollist(ui, tool, "check"):
235 if 'prompt' in _toollist(ui, tool, "check"):
236 checked = True
236 checked = True
237 if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
237 if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd,
238 (_("&Yes"), _("&No")), 1):
238 (_("&Yes"), _("&No")), 1):
239 r = 1
239 r = 1
240
240
241 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
241 if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
242 'changed' in _toollist(ui, tool, "check")):
242 'changed' in _toollist(ui, tool, "check")):
243 if filecmp.cmp(repo.wjoin(fd), back):
243 if filecmp.cmp(repo.wjoin(fd), back):
244 if ui.promptchoice(_(" output file %s appears unchanged\n"
244 if ui.promptchoice(_(" output file %s appears unchanged\n"
245 "was merge successful (yn)?") % fd,
245 "was merge successful (yn)?") % fd,
246 (_("&Yes"), _("&No")), 1):
246 (_("&Yes"), _("&No")), 1):
247 r = 1
247 r = 1
248
248
249 if _toolbool(ui, tool, "fixeol"):
249 if _toolbool(ui, tool, "fixeol"):
250 _matcheol(repo.wjoin(fd), back)
250 _matcheol(repo.wjoin(fd), back)
251
251
252 if r:
252 if r:
253 ui.warn(_("merging %s failed!\n") % fd)
253 ui.warn(_("merging %s failed!\n") % fd)
254 else:
254 else:
255 os.unlink(back)
255 os.unlink(back)
256
256
257 os.unlink(b)
257 os.unlink(b)
258 os.unlink(c)
258 os.unlink(c)
259 return r
259 return r
@@ -1,1410 +1,1425 b''
1 # util.py - Mercurial utility functions and platform specfic implementations
1 # util.py - Mercurial utility functions and platform specfic implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, stat, time, calendar, textwrap, unicodedata, signal
19 import os, stat, time, calendar, textwrap, unicodedata, signal
20 import imp
20 import imp
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keywords args
120 # XXX doesn't handle keywords args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 return open(outname, 'rb').read()
201 return open(outname, 'rb').read()
202 finally:
202 finally:
203 try:
203 try:
204 if inname:
204 if inname:
205 os.unlink(inname)
205 os.unlink(inname)
206 except:
206 except:
207 pass
207 pass
208 try:
208 try:
209 if outname:
209 if outname:
210 os.unlink(outname)
210 os.unlink(outname)
211 except:
211 except:
212 pass
212 pass
213
213
214 filtertable = {
214 filtertable = {
215 'tempfile:': tempfilter,
215 'tempfile:': tempfilter,
216 'pipe:': pipefilter,
216 'pipe:': pipefilter,
217 }
217 }
218
218
219 def filter(s, cmd):
219 def filter(s, cmd):
220 "filter a string through a command that transforms its input to its output"
220 "filter a string through a command that transforms its input to its output"
221 for name, fn in filtertable.iteritems():
221 for name, fn in filtertable.iteritems():
222 if cmd.startswith(name):
222 if cmd.startswith(name):
223 return fn(s, cmd[len(name):].lstrip())
223 return fn(s, cmd[len(name):].lstrip())
224 return pipefilter(s, cmd)
224 return pipefilter(s, cmd)
225
225
226 def binary(s):
226 def binary(s):
227 """return true if a string is binary data"""
227 """return true if a string is binary data"""
228 return bool(s and '\0' in s)
228 return bool(s and '\0' in s)
229
229
230 def increasingchunks(source, min=1024, max=65536):
230 def increasingchunks(source, min=1024, max=65536):
231 '''return no less than min bytes per chunk while data remains,
231 '''return no less than min bytes per chunk while data remains,
232 doubling min after each chunk until it reaches max'''
232 doubling min after each chunk until it reaches max'''
233 def log2(x):
233 def log2(x):
234 if not x:
234 if not x:
235 return 0
235 return 0
236 i = 0
236 i = 0
237 while x:
237 while x:
238 x >>= 1
238 x >>= 1
239 i += 1
239 i += 1
240 return i - 1
240 return i - 1
241
241
242 buf = []
242 buf = []
243 blen = 0
243 blen = 0
244 for chunk in source:
244 for chunk in source:
245 buf.append(chunk)
245 buf.append(chunk)
246 blen += len(chunk)
246 blen += len(chunk)
247 if blen >= min:
247 if blen >= min:
248 if min < max:
248 if min < max:
249 min = min << 1
249 min = min << 1
250 nmin = 1 << log2(blen)
250 nmin = 1 << log2(blen)
251 if nmin > min:
251 if nmin > min:
252 min = nmin
252 min = nmin
253 if min > max:
253 if min > max:
254 min = max
254 min = max
255 yield ''.join(buf)
255 yield ''.join(buf)
256 blen = 0
256 blen = 0
257 buf = []
257 buf = []
258 if buf:
258 if buf:
259 yield ''.join(buf)
259 yield ''.join(buf)
260
260
261 Abort = error.Abort
261 Abort = error.Abort
262
262
263 def always(fn):
263 def always(fn):
264 return True
264 return True
265
265
266 def never(fn):
266 def never(fn):
267 return False
267 return False
268
268
269 def pathto(root, n1, n2):
269 def pathto(root, n1, n2):
270 '''return the relative path from one place to another.
270 '''return the relative path from one place to another.
271 root should use os.sep to separate directories
271 root should use os.sep to separate directories
272 n1 should use os.sep to separate directories
272 n1 should use os.sep to separate directories
273 n2 should use "/" to separate directories
273 n2 should use "/" to separate directories
274 returns an os.sep-separated path.
274 returns an os.sep-separated path.
275
275
276 If n1 is a relative path, it's assumed it's
276 If n1 is a relative path, it's assumed it's
277 relative to root.
277 relative to root.
278 n2 should always be relative to root.
278 n2 should always be relative to root.
279 '''
279 '''
280 if not n1:
280 if not n1:
281 return localpath(n2)
281 return localpath(n2)
282 if os.path.isabs(n1):
282 if os.path.isabs(n1):
283 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
283 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
284 return os.path.join(root, localpath(n2))
284 return os.path.join(root, localpath(n2))
285 n2 = '/'.join((pconvert(root), n2))
285 n2 = '/'.join((pconvert(root), n2))
286 a, b = splitpath(n1), n2.split('/')
286 a, b = splitpath(n1), n2.split('/')
287 a.reverse()
287 a.reverse()
288 b.reverse()
288 b.reverse()
289 while a and b and a[-1] == b[-1]:
289 while a and b and a[-1] == b[-1]:
290 a.pop()
290 a.pop()
291 b.pop()
291 b.pop()
292 b.reverse()
292 b.reverse()
293 return os.sep.join((['..'] * len(a)) + b) or '.'
293 return os.sep.join((['..'] * len(a)) + b) or '.'
294
294
295 def canonpath(root, cwd, myname):
295 def canonpath(root, cwd, myname):
296 """return the canonical path of myname, given cwd and root"""
296 """return the canonical path of myname, given cwd and root"""
297 if endswithsep(root):
297 if endswithsep(root):
298 rootsep = root
298 rootsep = root
299 else:
299 else:
300 rootsep = root + os.sep
300 rootsep = root + os.sep
301 name = myname
301 name = myname
302 if not os.path.isabs(name):
302 if not os.path.isabs(name):
303 name = os.path.join(root, cwd, name)
303 name = os.path.join(root, cwd, name)
304 name = os.path.normpath(name)
304 name = os.path.normpath(name)
305 audit_path = path_auditor(root)
305 audit_path = path_auditor(root)
306 if name != rootsep and name.startswith(rootsep):
306 if name != rootsep and name.startswith(rootsep):
307 name = name[len(rootsep):]
307 name = name[len(rootsep):]
308 audit_path(name)
308 audit_path(name)
309 return pconvert(name)
309 return pconvert(name)
310 elif name == root:
310 elif name == root:
311 return ''
311 return ''
312 else:
312 else:
313 # Determine whether `name' is in the hierarchy at or beneath `root',
313 # Determine whether `name' is in the hierarchy at or beneath `root',
314 # by iterating name=dirname(name) until that causes no change (can't
314 # by iterating name=dirname(name) until that causes no change (can't
315 # check name == '/', because that doesn't work on windows). For each
315 # check name == '/', because that doesn't work on windows). For each
316 # `name', compare dev/inode numbers. If they match, the list `rel'
316 # `name', compare dev/inode numbers. If they match, the list `rel'
317 # holds the reversed list of components making up the relative file
317 # holds the reversed list of components making up the relative file
318 # name we want.
318 # name we want.
319 root_st = os.stat(root)
319 root_st = os.stat(root)
320 rel = []
320 rel = []
321 while True:
321 while True:
322 try:
322 try:
323 name_st = os.stat(name)
323 name_st = os.stat(name)
324 except OSError:
324 except OSError:
325 break
325 break
326 if samestat(name_st, root_st):
326 if samestat(name_st, root_st):
327 if not rel:
327 if not rel:
328 # name was actually the same as root (maybe a symlink)
328 # name was actually the same as root (maybe a symlink)
329 return ''
329 return ''
330 rel.reverse()
330 rel.reverse()
331 name = os.path.join(*rel)
331 name = os.path.join(*rel)
332 audit_path(name)
332 audit_path(name)
333 return pconvert(name)
333 return pconvert(name)
334 dirname, basename = os.path.split(name)
334 dirname, basename = os.path.split(name)
335 rel.append(basename)
335 rel.append(basename)
336 if dirname == name:
336 if dirname == name:
337 break
337 break
338 name = dirname
338 name = dirname
339
339
340 raise Abort('%s not under root' % myname)
340 raise Abort('%s not under root' % myname)
341
341
342 _hgexecutable = None
342 _hgexecutable = None
343
343
344 def main_is_frozen():
344 def main_is_frozen():
345 """return True if we are a frozen executable.
345 """return True if we are a frozen executable.
346
346
347 The code supports py2exe (most common, Windows only) and tools/freeze
347 The code supports py2exe (most common, Windows only) and tools/freeze
348 (portable, not much used).
348 (portable, not much used).
349 """
349 """
350 return (hasattr(sys, "frozen") or # new py2exe
350 return (hasattr(sys, "frozen") or # new py2exe
351 hasattr(sys, "importers") or # old py2exe
351 hasattr(sys, "importers") or # old py2exe
352 imp.is_frozen("__main__")) # tools/freeze
352 imp.is_frozen("__main__")) # tools/freeze
353
353
354 def hgexecutable():
354 def hgexecutable():
355 """return location of the 'hg' executable.
355 """return location of the 'hg' executable.
356
356
357 Defaults to $HG or 'hg' in the search path.
357 Defaults to $HG or 'hg' in the search path.
358 """
358 """
359 if _hgexecutable is None:
359 if _hgexecutable is None:
360 hg = os.environ.get('HG')
360 hg = os.environ.get('HG')
361 if hg:
361 if hg:
362 set_hgexecutable(hg)
362 set_hgexecutable(hg)
363 elif main_is_frozen():
363 elif main_is_frozen():
364 set_hgexecutable(sys.executable)
364 set_hgexecutable(sys.executable)
365 else:
365 else:
366 exe = find_exe('hg') or os.path.basename(sys.argv[0])
366 exe = find_exe('hg') or os.path.basename(sys.argv[0])
367 set_hgexecutable(exe)
367 set_hgexecutable(exe)
368 return _hgexecutable
368 return _hgexecutable
369
369
370 def set_hgexecutable(path):
370 def set_hgexecutable(path):
371 """set location of the 'hg' executable"""
371 """set location of the 'hg' executable"""
372 global _hgexecutable
372 global _hgexecutable
373 _hgexecutable = path
373 _hgexecutable = path
374
374
375 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
375 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
376 '''enhanced shell command execution.
376 '''enhanced shell command execution.
377 run with environment maybe modified, maybe in different dir.
377 run with environment maybe modified, maybe in different dir.
378
378
379 if command fails and onerr is None, return status. if ui object,
379 if command fails and onerr is None, return status. if ui object,
380 print error message and return status, else raise onerr object as
380 print error message and return status, else raise onerr object as
381 exception.
381 exception.
382
382
383 if out is specified, it is assumed to be a file-like object that has a
383 if out is specified, it is assumed to be a file-like object that has a
384 write() method. stdout and stderr will be redirected to out.'''
384 write() method. stdout and stderr will be redirected to out.'''
385 def py2shell(val):
385 def py2shell(val):
386 'convert python object into string that is useful to shell'
386 'convert python object into string that is useful to shell'
387 if val is None or val is False:
387 if val is None or val is False:
388 return '0'
388 return '0'
389 if val is True:
389 if val is True:
390 return '1'
390 return '1'
391 return str(val)
391 return str(val)
392 origcmd = cmd
392 origcmd = cmd
393 if os.name == 'nt':
393 if os.name == 'nt':
394 cmd = '"%s"' % cmd
394 cmd = '"%s"' % cmd
395 env = dict(os.environ)
395 env = dict(os.environ)
396 env.update((k, py2shell(v)) for k, v in environ.iteritems())
396 env.update((k, py2shell(v)) for k, v in environ.iteritems())
397 env['HG'] = hgexecutable()
397 env['HG'] = hgexecutable()
398 if out is None:
398 if out is None:
399 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
399 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
400 env=env, cwd=cwd)
400 env=env, cwd=cwd)
401 else:
401 else:
402 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
402 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
403 env=env, cwd=cwd, stdout=subprocess.PIPE,
403 env=env, cwd=cwd, stdout=subprocess.PIPE,
404 stderr=subprocess.STDOUT)
404 stderr=subprocess.STDOUT)
405 for line in proc.stdout:
405 for line in proc.stdout:
406 out.write(line)
406 out.write(line)
407 proc.wait()
407 proc.wait()
408 rc = proc.returncode
408 rc = proc.returncode
409 if sys.platform == 'OpenVMS' and rc & 1:
409 if sys.platform == 'OpenVMS' and rc & 1:
410 rc = 0
410 rc = 0
411 if rc and onerr:
411 if rc and onerr:
412 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
412 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
413 explain_exit(rc)[0])
413 explain_exit(rc)[0])
414 if errprefix:
414 if errprefix:
415 errmsg = '%s: %s' % (errprefix, errmsg)
415 errmsg = '%s: %s' % (errprefix, errmsg)
416 try:
416 try:
417 onerr.warn(errmsg + '\n')
417 onerr.warn(errmsg + '\n')
418 except AttributeError:
418 except AttributeError:
419 raise onerr(errmsg)
419 raise onerr(errmsg)
420 return rc
420 return rc
421
421
422 def checksignature(func):
422 def checksignature(func):
423 '''wrap a function with code to check for calling errors'''
423 '''wrap a function with code to check for calling errors'''
424 def check(*args, **kwargs):
424 def check(*args, **kwargs):
425 try:
425 try:
426 return func(*args, **kwargs)
426 return func(*args, **kwargs)
427 except TypeError:
427 except TypeError:
428 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
428 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
429 raise error.SignatureError
429 raise error.SignatureError
430 raise
430 raise
431
431
432 return check
432 return check
433
433
434 # os.path.lexists is not available on python2.3
434 # os.path.lexists is not available on python2.3
435 def lexists(filename):
435 def lexists(filename):
436 "test whether a file with this name exists. does not follow symlinks"
436 "test whether a file with this name exists. does not follow symlinks"
437 try:
437 try:
438 os.lstat(filename)
438 os.lstat(filename)
439 except:
439 except:
440 return False
440 return False
441 return True
441 return True
442
442
443 def unlink(f):
443 def unlink(f):
444 """unlink and remove the directory if it is empty"""
444 """unlink and remove the directory if it is empty"""
445 os.unlink(f)
445 os.unlink(f)
446 # try removing directories that might now be empty
446 # try removing directories that might now be empty
447 try:
447 try:
448 os.removedirs(os.path.dirname(f))
448 os.removedirs(os.path.dirname(f))
449 except OSError:
449 except OSError:
450 pass
450 pass
451
451
452 def copyfile(src, dest):
452 def copyfile(src, dest):
453 "copy a file, preserving mode and atime/mtime"
453 "copy a file, preserving mode and atime/mtime"
454 if os.path.islink(src):
454 if os.path.islink(src):
455 try:
455 try:
456 os.unlink(dest)
456 os.unlink(dest)
457 except:
457 except:
458 pass
458 pass
459 os.symlink(os.readlink(src), dest)
459 os.symlink(os.readlink(src), dest)
460 else:
460 else:
461 try:
461 try:
462 shutil.copyfile(src, dest)
462 shutil.copyfile(src, dest)
463 shutil.copystat(src, dest)
463 shutil.copystat(src, dest)
464 except shutil.Error, inst:
464 except shutil.Error, inst:
465 raise Abort(str(inst))
465 raise Abort(str(inst))
466
466
467 def copyfiles(src, dst, hardlink=None):
467 def copyfiles(src, dst, hardlink=None):
468 """Copy a directory tree using hardlinks if possible"""
468 """Copy a directory tree using hardlinks if possible"""
469
469
470 if hardlink is None:
470 if hardlink is None:
471 hardlink = (os.stat(src).st_dev ==
471 hardlink = (os.stat(src).st_dev ==
472 os.stat(os.path.dirname(dst)).st_dev)
472 os.stat(os.path.dirname(dst)).st_dev)
473
473
474 num = 0
474 num = 0
475 if os.path.isdir(src):
475 if os.path.isdir(src):
476 os.mkdir(dst)
476 os.mkdir(dst)
477 for name, kind in osutil.listdir(src):
477 for name, kind in osutil.listdir(src):
478 srcname = os.path.join(src, name)
478 srcname = os.path.join(src, name)
479 dstname = os.path.join(dst, name)
479 dstname = os.path.join(dst, name)
480 hardlink, n = copyfiles(srcname, dstname, hardlink)
480 hardlink, n = copyfiles(srcname, dstname, hardlink)
481 num += n
481 num += n
482 else:
482 else:
483 if hardlink:
483 if hardlink:
484 try:
484 try:
485 os_link(src, dst)
485 os_link(src, dst)
486 except (IOError, OSError):
486 except (IOError, OSError):
487 hardlink = False
487 hardlink = False
488 shutil.copy(src, dst)
488 shutil.copy(src, dst)
489 else:
489 else:
490 shutil.copy(src, dst)
490 shutil.copy(src, dst)
491 num += 1
491 num += 1
492
492
493 return hardlink, num
493 return hardlink, num
494
494
495 class path_auditor(object):
495 class path_auditor(object):
496 '''ensure that a filesystem path contains no banned components.
496 '''ensure that a filesystem path contains no banned components.
497 the following properties of a path are checked:
497 the following properties of a path are checked:
498
498
499 - under top-level .hg
499 - under top-level .hg
500 - starts at the root of a windows drive
500 - starts at the root of a windows drive
501 - contains ".."
501 - contains ".."
502 - traverses a symlink (e.g. a/symlink_here/b)
502 - traverses a symlink (e.g. a/symlink_here/b)
503 - inside a nested repository'''
503 - inside a nested repository'''
504
504
505 def __init__(self, root):
505 def __init__(self, root):
506 self.audited = set()
506 self.audited = set()
507 self.auditeddir = set()
507 self.auditeddir = set()
508 self.root = root
508 self.root = root
509
509
510 def __call__(self, path):
510 def __call__(self, path):
511 if path in self.audited:
511 if path in self.audited:
512 return
512 return
513 normpath = os.path.normcase(path)
513 normpath = os.path.normcase(path)
514 parts = splitpath(normpath)
514 parts = splitpath(normpath)
515 if (os.path.splitdrive(path)[0]
515 if (os.path.splitdrive(path)[0]
516 or parts[0].lower() in ('.hg', '.hg.', '')
516 or parts[0].lower() in ('.hg', '.hg.', '')
517 or os.pardir in parts):
517 or os.pardir in parts):
518 raise Abort(_("path contains illegal component: %s") % path)
518 raise Abort(_("path contains illegal component: %s") % path)
519 if '.hg' in path.lower():
519 if '.hg' in path.lower():
520 lparts = [p.lower() for p in parts]
520 lparts = [p.lower() for p in parts]
521 for p in '.hg', '.hg.':
521 for p in '.hg', '.hg.':
522 if p in lparts[1:]:
522 if p in lparts[1:]:
523 pos = lparts.index(p)
523 pos = lparts.index(p)
524 base = os.path.join(*parts[:pos])
524 base = os.path.join(*parts[:pos])
525 raise Abort(_('path %r is inside repo %r') % (path, base))
525 raise Abort(_('path %r is inside repo %r') % (path, base))
526 def check(prefix):
526 def check(prefix):
527 curpath = os.path.join(self.root, prefix)
527 curpath = os.path.join(self.root, prefix)
528 try:
528 try:
529 st = os.lstat(curpath)
529 st = os.lstat(curpath)
530 except OSError, err:
530 except OSError, err:
531 # EINVAL can be raised as invalid path syntax under win32.
531 # EINVAL can be raised as invalid path syntax under win32.
532 # They must be ignored for patterns can be checked too.
532 # They must be ignored for patterns can be checked too.
533 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
533 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
534 raise
534 raise
535 else:
535 else:
536 if stat.S_ISLNK(st.st_mode):
536 if stat.S_ISLNK(st.st_mode):
537 raise Abort(_('path %r traverses symbolic link %r') %
537 raise Abort(_('path %r traverses symbolic link %r') %
538 (path, prefix))
538 (path, prefix))
539 elif (stat.S_ISDIR(st.st_mode) and
539 elif (stat.S_ISDIR(st.st_mode) and
540 os.path.isdir(os.path.join(curpath, '.hg'))):
540 os.path.isdir(os.path.join(curpath, '.hg'))):
541 raise Abort(_('path %r is inside repo %r') %
541 raise Abort(_('path %r is inside repo %r') %
542 (path, prefix))
542 (path, prefix))
543 parts.pop()
543 parts.pop()
544 prefixes = []
544 prefixes = []
545 while parts:
545 while parts:
546 prefix = os.sep.join(parts)
546 prefix = os.sep.join(parts)
547 if prefix in self.auditeddir:
547 if prefix in self.auditeddir:
548 break
548 break
549 check(prefix)
549 check(prefix)
550 prefixes.append(prefix)
550 prefixes.append(prefix)
551 parts.pop()
551 parts.pop()
552
552
553 self.audited.add(path)
553 self.audited.add(path)
554 # only add prefixes to the cache after checking everything: we don't
554 # only add prefixes to the cache after checking everything: we don't
555 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
555 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
556 self.auditeddir.update(prefixes)
556 self.auditeddir.update(prefixes)
557
557
558 def nlinks(pathname):
558 def nlinks(pathname):
559 """Return number of hardlinks for the given file."""
559 """Return number of hardlinks for the given file."""
560 return os.lstat(pathname).st_nlink
560 return os.lstat(pathname).st_nlink
561
561
562 if hasattr(os, 'link'):
562 if hasattr(os, 'link'):
563 os_link = os.link
563 os_link = os.link
564 else:
564 else:
565 def os_link(src, dst):
565 def os_link(src, dst):
566 raise OSError(0, _("Hardlinks not supported"))
566 raise OSError(0, _("Hardlinks not supported"))
567
567
568 def lookup_reg(key, name=None, scope=None):
568 def lookup_reg(key, name=None, scope=None):
569 return None
569 return None
570
570
571 def hidewindow():
571 def hidewindow():
572 """Hide current shell window.
572 """Hide current shell window.
573
573
574 Used to hide the window opened when starting asynchronous
574 Used to hide the window opened when starting asynchronous
575 child process under Windows, unneeded on other systems.
575 child process under Windows, unneeded on other systems.
576 """
576 """
577 pass
577 pass
578
578
579 if os.name == 'nt':
579 if os.name == 'nt':
580 from windows import *
580 from windows import *
581 else:
581 else:
582 from posix import *
582 from posix import *
583
583
584 def makelock(info, pathname):
584 def makelock(info, pathname):
585 try:
585 try:
586 return os.symlink(info, pathname)
586 return os.symlink(info, pathname)
587 except OSError, why:
587 except OSError, why:
588 if why.errno == errno.EEXIST:
588 if why.errno == errno.EEXIST:
589 raise
589 raise
590 except AttributeError: # no symlink in os
590 except AttributeError: # no symlink in os
591 pass
591 pass
592
592
593 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
593 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
594 os.write(ld, info)
594 os.write(ld, info)
595 os.close(ld)
595 os.close(ld)
596
596
597 def readlock(pathname):
597 def readlock(pathname):
598 try:
598 try:
599 return os.readlink(pathname)
599 return os.readlink(pathname)
600 except OSError, why:
600 except OSError, why:
601 if why.errno not in (errno.EINVAL, errno.ENOSYS):
601 if why.errno not in (errno.EINVAL, errno.ENOSYS):
602 raise
602 raise
603 except AttributeError: # no symlink in os
603 except AttributeError: # no symlink in os
604 pass
604 pass
605 return posixfile(pathname).read()
605 return posixfile(pathname).read()
606
606
607 def fstat(fp):
607 def fstat(fp):
608 '''stat file object that may not have fileno method.'''
608 '''stat file object that may not have fileno method.'''
609 try:
609 try:
610 return os.fstat(fp.fileno())
610 return os.fstat(fp.fileno())
611 except AttributeError:
611 except AttributeError:
612 return os.stat(fp.name)
612 return os.stat(fp.name)
613
613
614 # File system features
614 # File system features
615
615
616 def checkcase(path):
616 def checkcase(path):
617 """
617 """
618 Check whether the given path is on a case-sensitive filesystem
618 Check whether the given path is on a case-sensitive filesystem
619
619
620 Requires a path (like /foo/.hg) ending with a foldable final
620 Requires a path (like /foo/.hg) ending with a foldable final
621 directory component.
621 directory component.
622 """
622 """
623 s1 = os.stat(path)
623 s1 = os.stat(path)
624 d, b = os.path.split(path)
624 d, b = os.path.split(path)
625 p2 = os.path.join(d, b.upper())
625 p2 = os.path.join(d, b.upper())
626 if path == p2:
626 if path == p2:
627 p2 = os.path.join(d, b.lower())
627 p2 = os.path.join(d, b.lower())
628 try:
628 try:
629 s2 = os.stat(p2)
629 s2 = os.stat(p2)
630 if s2 == s1:
630 if s2 == s1:
631 return False
631 return False
632 return True
632 return True
633 except:
633 except:
634 return True
634 return True
635
635
636 _fspathcache = {}
636 _fspathcache = {}
637 def fspath(name, root):
637 def fspath(name, root):
638 '''Get name in the case stored in the filesystem
638 '''Get name in the case stored in the filesystem
639
639
640 The name is either relative to root, or it is an absolute path starting
640 The name is either relative to root, or it is an absolute path starting
641 with root. Note that this function is unnecessary, and should not be
641 with root. Note that this function is unnecessary, and should not be
642 called, for case-sensitive filesystems (simply because it's expensive).
642 called, for case-sensitive filesystems (simply because it's expensive).
643 '''
643 '''
644 # If name is absolute, make it relative
644 # If name is absolute, make it relative
645 if name.lower().startswith(root.lower()):
645 if name.lower().startswith(root.lower()):
646 l = len(root)
646 l = len(root)
647 if name[l] == os.sep or name[l] == os.altsep:
647 if name[l] == os.sep or name[l] == os.altsep:
648 l = l + 1
648 l = l + 1
649 name = name[l:]
649 name = name[l:]
650
650
651 if not os.path.exists(os.path.join(root, name)):
651 if not os.path.exists(os.path.join(root, name)):
652 return None
652 return None
653
653
654 seps = os.sep
654 seps = os.sep
655 if os.altsep:
655 if os.altsep:
656 seps = seps + os.altsep
656 seps = seps + os.altsep
657 # Protect backslashes. This gets silly very quickly.
657 # Protect backslashes. This gets silly very quickly.
658 seps.replace('\\','\\\\')
658 seps.replace('\\','\\\\')
659 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
659 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
660 dir = os.path.normcase(os.path.normpath(root))
660 dir = os.path.normcase(os.path.normpath(root))
661 result = []
661 result = []
662 for part, sep in pattern.findall(name):
662 for part, sep in pattern.findall(name):
663 if sep:
663 if sep:
664 result.append(sep)
664 result.append(sep)
665 continue
665 continue
666
666
667 if dir not in _fspathcache:
667 if dir not in _fspathcache:
668 _fspathcache[dir] = os.listdir(dir)
668 _fspathcache[dir] = os.listdir(dir)
669 contents = _fspathcache[dir]
669 contents = _fspathcache[dir]
670
670
671 lpart = part.lower()
671 lpart = part.lower()
672 lenp = len(part)
672 lenp = len(part)
673 for n in contents:
673 for n in contents:
674 if lenp == len(n) and n.lower() == lpart:
674 if lenp == len(n) and n.lower() == lpart:
675 result.append(n)
675 result.append(n)
676 break
676 break
677 else:
677 else:
678 # Cannot happen, as the file exists!
678 # Cannot happen, as the file exists!
679 result.append(part)
679 result.append(part)
680 dir = os.path.join(dir, lpart)
680 dir = os.path.join(dir, lpart)
681
681
682 return ''.join(result)
682 return ''.join(result)
683
683
684 def checkexec(path):
684 def checkexec(path):
685 """
685 """
686 Check whether the given path is on a filesystem with UNIX-like exec flags
686 Check whether the given path is on a filesystem with UNIX-like exec flags
687
687
688 Requires a directory (like /foo/.hg)
688 Requires a directory (like /foo/.hg)
689 """
689 """
690
690
691 # VFAT on some Linux versions can flip mode but it doesn't persist
691 # VFAT on some Linux versions can flip mode but it doesn't persist
692 # a FS remount. Frequently we can detect it if files are created
692 # a FS remount. Frequently we can detect it if files are created
693 # with exec bit on.
693 # with exec bit on.
694
694
695 try:
695 try:
696 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
696 EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
697 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
697 fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-')
698 try:
698 try:
699 os.close(fh)
699 os.close(fh)
700 m = os.stat(fn).st_mode & 0777
700 m = os.stat(fn).st_mode & 0777
701 new_file_has_exec = m & EXECFLAGS
701 new_file_has_exec = m & EXECFLAGS
702 os.chmod(fn, m ^ EXECFLAGS)
702 os.chmod(fn, m ^ EXECFLAGS)
703 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
703 exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m)
704 finally:
704 finally:
705 os.unlink(fn)
705 os.unlink(fn)
706 except (IOError, OSError):
706 except (IOError, OSError):
707 # we don't care, the user probably won't be able to commit anyway
707 # we don't care, the user probably won't be able to commit anyway
708 return False
708 return False
709 return not (new_file_has_exec or exec_flags_cannot_flip)
709 return not (new_file_has_exec or exec_flags_cannot_flip)
710
710
711 def checklink(path):
712 """check whether the given path is on a symlink-capable filesystem"""
713 # mktemp is not racy because symlink creation will fail if the
714 # file already exists
715 name = tempfile.mktemp(dir=path, prefix='hg-checklink-')
716 try:
717 os.symlink(".", name)
718 os.unlink(name)
719 return True
720 except (OSError, AttributeError):
721 return False
722
723 def needbinarypatch():
724 """return True if patches should be applied in binary mode by default."""
725 return os.name == 'nt'
726
727 def endswithsep(path):
728 '''Check whether path ends with os.sep or os.altsep.'''
729 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
730
731 def splitpath(path):
732 '''Split path by os.sep.
733 Note that this function does not use os.altsep because it is
734 intended as a simple alternative to "xxx.split(os.sep)".
735 It is recommended to use os.path.normpath() before using this
736 function if needed.'''
737 return path.split(os.sep)
738
739 def gui():
740 '''Are we running in a GUI?'''
741 return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY")
742
743 def mktempcopy(name, emptyok=False, createmode=None):
744 """Create a temporary file with the same contents as name
745
746 The permission bits are copied from the original file.
747
748 If the temporary file is going to be truncated immediately, you
749 can use emptyok=True as an optimization.
750
751 Returns the name of the temporary file.
752 """
753 d, fn = os.path.split(name)
754 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
755 os.close(fd)
756 # Temporary files are created with mode 0600, which is usually not
757 # what we want. If the original file already exists, just copy
758 # its mode. Otherwise, manually obey umask.
759 try:
760 st_mode = os.lstat(name).st_mode & 0777
761 except OSError, inst:
762 if inst.errno != errno.ENOENT:
763 raise
764 st_mode = createmode
765 if st_mode is None:
766 st_mode = ~umask
767 st_mode &= 0666
768 os.chmod(temp, st_mode)
769 if emptyok:
770 return temp
771 try:
772 try:
773 ifp = posixfile(name, "rb")
774 except IOError, inst:
775 if inst.errno == errno.ENOENT:
776 return temp
777 if not getattr(inst, 'filename', None):
778 inst.filename = name
779 raise
780 ofp = posixfile(temp, "wb")
781 for chunk in filechunkiter(ifp):
782 ofp.write(chunk)
783 ifp.close()
784 ofp.close()
785 except:
786 try: os.unlink(temp)
787 except: pass
788 raise
789 return temp
790
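As a rough sketch of how mktempcopy() is used (editor's addition; the file path is hypothetical and mercurial.util is assumed importable):

import os
from mercurial import util

tmp = util.mktempcopy('/tmp/repo/.hg/dirstate')   # hypothetical existing file
# tmp is a sibling of the original (something like '/tmp/repo/.hg/.dirstate-XXXXXX')
# with the same permission bits; the caller must rename or delete it.
os.unlink(tmp)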
791 class atomictempfile(object):
792 """file-like object that atomically updates a file
793
794 All writes will be redirected to a temporary copy of the original
795 file. When rename is called, the copy is renamed to the original
796 name, making the changes visible.
797 """
798 def __init__(self, name, mode='w+b', createmode=None):
799 self.__name = name
800 self._fp = None
801 self.temp = mktempcopy(name, emptyok=('w' in mode),
802 createmode=createmode)
803 self._fp = posixfile(self.temp, mode)
804
805 def __getattr__(self, name):
806 return getattr(self._fp, name)
807
808 def rename(self):
809 if not self._fp.closed:
810 self._fp.close()
811 rename(self.temp, localpath(self.__name))
812
813 def __del__(self):
814 if not self._fp:
815 return
816 if not self._fp.closed:
817 try:
818 os.unlink(self.temp)
819 except: pass
820 self._fp.close()
821
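A hedged sketch of the intended write-then-rename pattern (editor's addition; the target path is hypothetical):

from mercurial import util

f = util.atomictempfile('/tmp/repo/.hg/branch')   # hypothetical target file
f.write('default\n')
f.rename()   # only now does the new content replace the original file
# If rename() is never called, __del__ discards the temporary copy instead.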
822 def makedirs(name, mode=None):
823 """recursive directory creation with parent mode inheritance"""
824 try:
825 os.mkdir(name)
826 if mode is not None:
827 os.chmod(name, mode)
828 return
829 except OSError, err:
830 if err.errno == errno.EEXIST:
831 return
832 if err.errno != errno.ENOENT:
833 raise
834 parent = os.path.abspath(os.path.dirname(name))
835 makedirs(parent, mode)
836 makedirs(name, mode)
837
838 class opener(object):
839 """Open files relative to a base directory
840
841 This class is used to hide the details of COW semantics and
842 remote file access from higher level code.
843 """
844 def __init__(self, base, audit=True):
845 self.base = base
846 if audit:
847 self.audit_path = path_auditor(base)
848 else:
849 self.audit_path = always
850 self.createmode = None
851
852 @propertycache
853 def _can_symlink(self):
854 return checklink(self.base)
855
856 def _fixfilemode(self, name):
857 if self.createmode is None:
858 return
859 os.chmod(name, self.createmode & 0666)
860
861 def __call__(self, path, mode="r", text=False, atomictemp=False):
862 self.audit_path(path)
863 f = os.path.join(self.base, path)
864
865 if not text and "b" not in mode:
866 mode += "b" # for that other OS
867
868 nlink = -1
869 if mode not in ("r", "rb"):
870 try:
871 nlink = nlinks(f)
872 except OSError:
873 nlink = 0
874 d = os.path.dirname(f)
875 if not os.path.isdir(d):
876 makedirs(d, self.createmode)
877 if atomictemp:
878 return atomictempfile(f, mode, self.createmode)
879 if nlink > 1:
880 rename(mktempcopy(f), f)
881 fp = posixfile(f, mode)
882 if nlink == 0:
883 self._fixfilemode(f)
884 return fp
885
886 def symlink(self, src, dst):
887 self.audit_path(dst)
888 linkname = os.path.join(self.base, dst)
889 try:
890 os.unlink(linkname)
891 except OSError:
892 pass
893
894 dirname = os.path.dirname(linkname)
895 if not os.path.exists(dirname):
896 makedirs(dirname, self.createmode)
897
898 if self._can_symlink:
899 try:
900 os.symlink(src, linkname)
901 except OSError, err:
902 raise OSError(err.errno, _('could not symlink to %r: %s') %
903 (src, err.strerror), linkname)
904 else:
905 f = self(dst, "w")
906 f.write(src)
907 f.close()
908 self._fixfilemode(dst)
909
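A brief sketch of how an opener is typically used (editor's addition; the base directory and file name are hypothetical):

from mercurial import util

op = util.opener('/tmp/repo/.hg')        # hypothetical base directory
f = op('mynotes', 'w', atomictemp=True)  # path is resolved relative to base
f.write('scratch data\n')
f.rename()                               # atomictemp=True gives atomictempfile semantics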
910 class chunkbuffer(object):
911 """Allow arbitrary sized chunks of data to be efficiently read from an
912 iterator over chunks of arbitrary size."""
913
914 def __init__(self, in_iter):
915 """in_iter is the iterator that's iterating over the input chunks."""
916
917 def splitbig(chunks):
918 for chunk in chunks:
919 if len(chunk) > 2**20:
920 pos = 0
921 while pos < len(chunk):
922 end = pos + 2 ** 18
923 yield chunk[pos:end]
924 pos = end
925 else:
926 yield chunk
927 self.iter = splitbig(in_iter)
928 self._queue = []
929
930 def read(self, l):
931 """Read L bytes of data from the iterator of chunks of data.
932 Returns less than L bytes if the iterator runs dry."""
933 left = l
934 buf = ''
935 queue = self._queue
936 while left > 0:
937 # refill the queue
938 if not queue:
939 target = 2**18
940 for chunk in self.iter:
941 queue.append(chunk)
942 target -= len(chunk)
943 if target <= 0:
944 break
945 if not queue:
946 break
947
948 chunk = queue.pop(0)
949 left -= len(chunk)
950 if left < 0:
951 queue.insert(0, chunk[left:])
952 buf += chunk[:left]
953 else:
954 buf += chunk
955
956 return buf
957
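A small worked example of read() crossing chunk boundaries (editor's sketch, assuming this module is importable as mercurial.util):

from mercurial import util

buf = util.chunkbuffer(iter(['abc', 'defg', 'hi']))
print buf.read(4)    # 'abcd' -- reads across the chunk boundary
print buf.read(10)   # 'efghi' -- returns fewer bytes once the iterator runs dry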
958 def filechunkiter(f, size=65536, limit=None):
959 """Create a generator that produces the data in the file, size
960 (default 65536) bytes at a time, up to optional limit (default is
961 to read all data). Chunks may be less than size bytes if the
962 chunk is the last chunk in the file, or the file is a socket or
963 some other type of file that sometimes reads less data than is
964 requested."""
965 assert size >= 0
966 assert limit is None or limit >= 0
967 while True:
968 if limit is None:
969 nbytes = size
970 else:
971 nbytes = min(limit, size)
972 s = nbytes and f.read(nbytes)
973 if not s:
974 break
975 if limit:
976 limit -= len(s)
977 yield s
978
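A usage sketch (editor's addition; the file path is hypothetical):

from mercurial import util

fp = open('/tmp/somefile', 'rb')          # hypothetical file
total = 0
for chunk in util.filechunkiter(fp, size=8192, limit=1 << 20):
    total += len(chunk)                   # never reads more than 1 MB in total
fp.close()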
979 def makedate():
980 lt = time.localtime()
981 if lt[8] == 1 and time.daylight:
982 tz = time.altzone
983 else:
984 tz = time.timezone
985 return time.mktime(lt), tz
986
987 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
988 """represent a (unixtime, offset) tuple as a localized time.
989 unixtime is seconds since the epoch, and offset is the time zone's
990 number of seconds away from UTC. The time zone is appended only if
991 the format contains the %1 (signed hours) or %2 (minutes) placeholders."""
992 t, tz = date or makedate()
993 if "%1" in format or "%2" in format:
994 sign = (tz > 0) and "-" or "+"
995 minutes = abs(tz) // 60
996 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
997 format = format.replace("%2", "%02d" % (minutes % 60))
998 s = time.strftime(format, time.gmtime(float(t) - tz))
999 return s
1000
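A worked example of the %1/%2 placeholders (editor's sketch; the timestamp is arbitrary):

from mercurial import util

stamp = (1187280660, -7200)                        # offset -7200 means UTC+2
print util.datestr(stamp, '%Y-%m-%d %H:%M %1%2')   # '2007-08-16 18:11 +0200'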
1001 def shortdate(date=None):
1002 """turn (timestamp, tzoff) tuple into an ISO 8601 date."""
1003 return datestr(date, format='%Y-%m-%d')
1004
1005 def strdate(string, format, defaults=[]):
1006 """parse a localized time string and return a (unixtime, offset) tuple.
1007 if the string cannot be parsed, ValueError is raised."""
1008 def timezone(string):
1009 tz = string.split()[-1]
1010 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1011 sign = (tz[0] == "+") and 1 or -1
1012 hours = int(tz[1:3])
1013 minutes = int(tz[3:5])
1014 return -sign * (hours * 60 + minutes) * 60
1015 if tz == "GMT" or tz == "UTC":
1016 return 0
1017 return None
1018
1019 # NOTE: unixtime = localunixtime + offset
1020 offset, date = timezone(string), string
1021 if offset != None:
1022 date = " ".join(string.split()[:-1])
1023
1024 # add missing elements from defaults
1025 for part in defaults:
1026 found = [True for p in part if ("%"+p) in format]
1027 if not found:
1028 date += "@" + defaults[part]
1029 format += "@%" + part[0]
1030
1031 timetuple = time.strptime(date, format)
1032 localunixtime = int(calendar.timegm(timetuple))
1033 if offset is None:
1034 # local timezone
1035 unixtime = int(time.mktime(timetuple))
1036 offset = unixtime - localunixtime
1037 else:
1038 unixtime = localunixtime + offset
1039 return unixtime, offset
1040
1041 def parsedate(date, formats=None, defaults=None):
1042 """parse a localized date/time string and return a (unixtime, offset) tuple.
1043
1044 The date may be a "unixtime offset" string or in one of the specified
1045 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1046 """
1047 if not date:
1048 return 0, 0
1049 if isinstance(date, tuple) and len(date) == 2:
1050 return date
1051 if not formats:
1052 formats = defaultdateformats
1053 date = date.strip()
1054 try:
1055 when, offset = map(int, date.split(' '))
1056 except ValueError:
1057 # fill out defaults
1058 if not defaults:
1059 defaults = {}
1060 now = makedate()
1061 for part in "d mb yY HI M S".split():
1062 if part not in defaults:
1063 if part[0] in "HMS":
1064 defaults[part] = "00"
1065 else:
1066 defaults[part] = datestr(now, "%" + part[0])
1067
1068 for format in formats:
1069 try:
1070 when, offset = strdate(date, format, defaults)
1071 except (ValueError, OverflowError):
1072 pass
1073 else:
1074 break
1075 else:
1076 raise Abort(_('invalid date: %r ') % date)
1077 # validate explicit (probably user-specified) date and
1078 # time zone offset. values must fit in signed 32 bits for
1079 # current 32-bit linux runtimes. timezones go from UTC-12
1080 # to UTC+14
1081 if abs(when) > 0x7fffffff:
1082 raise Abort(_('date exceeds 32 bits: %d') % when)
1083 if offset < -50400 or offset > 43200:
1084 raise Abort(_('impossible time zone offset: %d') % offset)
1085 return when, offset
1086
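Two illustrative calls (editor's sketch; the values are chosen to match the datestr() example above):

from mercurial import util

print util.parsedate('1187280660 -7200')        # -> (1187280660, -7200)
print util.parsedate('2007-08-16 18:11 +0200')  # -> (1187280660, -7200)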
1087 def matchdate(date):
1088 """Return a function that matches a given date match specifier
1089
1090 Formats include:
1091
1092 '{date}' match a given date to the accuracy provided
1093 '<{date}' on or before a given date
1094 '>{date}' on or after a given date
1095 '-{days}' within a given number of days of today
1096 '{date} to {date}' a date range, inclusive
1097
1098 """
1099
1100 def lower(date):
1101 d = dict(mb="1", d="1")
1102 return parsedate(date, extendeddateformats, d)[0]
1103
1104 def upper(date):
1105 d = dict(mb="12", HI="23", M="59", S="59")
1106 for days in "31 30 29".split():
1107 try:
1108 d["d"] = days
1109 return parsedate(date, extendeddateformats, d)[0]
1110 except:
1111 pass
1112 d["d"] = "28"
1113 return parsedate(date, extendeddateformats, d)[0]
1114
1115 date = date.strip()
1116 if date[0] == "<":
1117 when = upper(date[1:])
1118 return lambda x: x <= when
1119 elif date[0] == ">":
1120 when = lower(date[1:])
1121 return lambda x: x >= when
1122 elif date[0] == "-":
1123 try:
1124 days = int(date[1:])
1125 except ValueError:
1126 raise Abort(_("invalid day spec: %s") % date[1:])
1127 when = makedate()[0] - days * 3600 * 24
1128 return lambda x: x >= when
1129 elif " to " in date:
1130 a, b = date.split(" to ")
1131 start, stop = lower(a), upper(b)
1132 return lambda x: x >= start and x <= stop
1133 else:
1134 start, stop = lower(date), upper(date)
1135 return lambda x: x >= start and x <= stop
1136
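A short sketch of the returned predicate (editor's addition):

from mercurial import util

afteraug = util.matchdate('>2007-08-01')
print afteraug(util.parsedate('2007-08-16')[0])   # True: on or after the cutoff
print afteraug(util.parsedate('2007-07-31')[0])   # False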
1137 def shortuser(user):
1138 """Return a short representation of a user name or email address."""
1139 f = user.find('@')
1140 if f >= 0:
1141 user = user[:f]
1142 f = user.find('<')
1143 if f >= 0:
1144 user = user[f + 1:]
1145 f = user.find(' ')
1146 if f >= 0:
1147 user = user[:f]
1148 f = user.find('.')
1149 if f >= 0:
1150 user = user[:f]
1151 return user
1152
1153 def email(author):
1154 '''get email of author.'''
1155 r = author.find('>')
1156 if r == -1:
1157 r = None
1158 return author[author.find('<') + 1:r]
1159
1160 def ellipsis(text, maxlength=400):
1161 """Trim string to at most maxlength (default: 400) characters."""
1162 if len(text) <= maxlength:
1163 return text
1164 else:
1165 return "%s..." % (text[:maxlength - 3])
1166
1167 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
1168 '''yield every hg repository under path, recursively.'''
1169 def errhandler(err):
1170 if err.filename == path:
1171 raise err
1172 if followsym and hasattr(os.path, 'samestat'):
1173 def _add_dir_if_not_there(dirlst, dirname):
1174 match = False
1175 samestat = os.path.samestat
1176 dirstat = os.stat(dirname)
1177 for lstdirstat in dirlst:
1178 if samestat(dirstat, lstdirstat):
1179 match = True
1180 break
1181 if not match:
1182 dirlst.append(dirstat)
1183 return not match
1184 else:
1185 followsym = False
1186
1187 if (seen_dirs is None) and followsym:
1188 seen_dirs = []
1189 _add_dir_if_not_there(seen_dirs, path)
1190 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
1191 dirs.sort()
1192 if '.hg' in dirs:
1193 yield root # found a repository
1194 qroot = os.path.join(root, '.hg', 'patches')
1195 if os.path.isdir(os.path.join(qroot, '.hg')):
1196 yield qroot # we have a patch queue repo here
1197 if recurse:
1198 # avoid recursing inside the .hg directory
1199 dirs.remove('.hg')
1200 else:
1201 dirs[:] = [] # don't descend further
1202 elif followsym:
1203 newdirs = []
1204 for d in dirs:
1205 fname = os.path.join(root, d)
1206 if _add_dir_if_not_there(seen_dirs, fname):
1207 if os.path.islink(fname):
1208 for hgname in walkrepos(fname, True, seen_dirs):
1209 yield hgname
1210 else:
1211 newdirs.append(d)
1212 dirs[:] = newdirs
1213
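A usage sketch (editor's addition; the root path is hypothetical):

from mercurial import util

for repo in util.walkrepos('/srv/repos', followsym=True):   # hypothetical root
    print repo        # prints each repository root, including MQ patch repos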
1214 _rcpath = None
1215
1216 def os_rcpath():
1217 '''return default os-specific hgrc search path'''
1218 path = system_rcpath()
1219 path.extend(user_rcpath())
1220 path = [os.path.normpath(f) for f in path]
1221 return path
1222
1223 def rcpath():
1224 '''return hgrc search path. if env var HGRCPATH is set, use it.
1225 for each item in path, if directory, use files ending in .rc,
1226 else use item.
1227 make HGRCPATH empty to only look in .hg/hgrc of current repo.
1228 if no HGRCPATH, use default os-specific path.'''
1229 global _rcpath
1230 if _rcpath is None:
1231 if 'HGRCPATH' in os.environ:
1232 _rcpath = []
1233 for p in os.environ['HGRCPATH'].split(os.pathsep):
1234 if not p:
1235 continue
1236 p = expandpath(p)
1237 if os.path.isdir(p):
1238 for f, kind in osutil.listdir(p):
1239 if f.endswith('.rc'):
1240 _rcpath.append(os.path.join(p, f))
1241 else:
1242 _rcpath.append(p)
1243 else:
1244 _rcpath = os_rcpath()
1245 return _rcpath
1246
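A sketch of how HGRCPATH influences the result (editor's addition; the paths are hypothetical, and note that rcpath() caches its first answer in _rcpath):

import os
from mercurial import util

os.environ['HGRCPATH'] = os.pathsep.join(['/etc/mercurial/conf.d',   # every *.rc inside is used
                                          '/home/user/extra.hgrc'])  # used as-is
print util.rcpath()
# Setting HGRCPATH to an empty string limits lookup to the repository's .hg/hgrc.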
1247 def bytecount(nbytes):
1248 '''return byte count formatted as readable string, with units'''
1249
1250 units = (
1251 (100, 1 << 30, _('%.0f GB')),
1252 (10, 1 << 30, _('%.1f GB')),
1253 (1, 1 << 30, _('%.2f GB')),
1254 (100, 1 << 20, _('%.0f MB')),
1255 (10, 1 << 20, _('%.1f MB')),
1256 (1, 1 << 20, _('%.2f MB')),
1257 (100, 1 << 10, _('%.0f KB')),
1258 (10, 1 << 10, _('%.1f KB')),
1259 (1, 1 << 10, _('%.2f KB')),
1260 (1, 1, _('%.0f bytes')),
1261 )
1262
1263 for multiplier, divisor, format in units:
1264 if nbytes >= divisor * multiplier:
1265 return format % (nbytes / float(divisor))
1266 return units[-1][2] % nbytes
1267
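A few illustrative values (editor's sketch):

from mercurial import util

print util.bytecount(0)          # '0 bytes'
print util.bytecount(12345)      # '12.1 KB'
print util.bytecount(1 << 30)    # '1.00 GB'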
1268 def drop_scheme(scheme, path):
1269 sc = scheme + ':'
1270 if path.startswith(sc):
1271 path = path[len(sc):]
1272 if path.startswith('//'):
1273 if scheme == 'file':
1274 i = path.find('/', 2)
1275 if i == -1:
1276 return ''
1277 # On Windows, absolute paths are rooted at the current drive
1278 # root. On POSIX they are rooted at the file system root.
1279 if os.name == 'nt':
1280 droot = os.path.splitdrive(os.getcwd())[0] + '/'
1281 path = os.path.join(droot, path[i + 1:])
1282 else:
1283 path = path[i:]
1284 else:
1285 path = path[2:]
1286 return path
1287
1288 def uirepr(s):
1289 # Avoid double backslash in Windows path repr()
1290 return repr(s).replace('\\\\', '\\')
1291
1292 #### naming convention of below implementation follows 'textwrap' module
1293
1294 class MBTextWrapper(textwrap.TextWrapper):
1295 def __init__(self, **kwargs):
1296 textwrap.TextWrapper.__init__(self, **kwargs)
1297
1298 def _cutdown(self, str, space_left):
1299 l = 0
1300 ucstr = unicode(str, encoding.encoding)
1301 w = unicodedata.east_asian_width
1302 for i in xrange(len(ucstr)):
1303 l += w(ucstr[i]) in 'WFA' and 2 or 1
1304 if space_left < l:
1305 return (ucstr[:i].encode(encoding.encoding),
1306 ucstr[i:].encode(encoding.encoding))
1307 return str, ''
1308
1309 # ----------------------------------------
1310 # overriding of base class
1311
1312 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1313 space_left = max(width - cur_len, 1)
1314
1315 if self.break_long_words:
1316 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1317 cur_line.append(cut)
1318 reversed_chunks[-1] = res
1319 elif not cur_line:
1320 cur_line.append(reversed_chunks.pop())
1321
1322 #### naming convention of above implementation follows 'textwrap' module
1323
1324 def wrap(line, width=None, initindent='', hangindent=''):
1325 if width is None:
1326 width = termwidth() - 2
1327 maxindent = max(len(hangindent), len(initindent))
1328 if width <= maxindent:
1329 # adjust for weird terminal size
1330 width = max(78, maxindent + 1)
1331 wrapper = MBTextWrapper(width=width,
1332 initial_indent=initindent,
1333 subsequent_indent=hangindent)
1334 return wrapper.fill(line)
1335
1336 def iterlines(iterator):
1337 for chunk in iterator:
1338 for line in chunk.splitlines():
1339 yield line
1340
1341 def expandpath(path):
1342 return os.path.expanduser(os.path.expandvars(path))
1343
1344 def hgcmd():
1345 """Return the command used to execute the current hg
1346
1347 This is different from hgexecutable() because on Windows we want
1348 to avoid things opening new shell windows like batch files, so we
1349 get either the python call or current executable.
1350 """
1351 if main_is_frozen():
1352 return [sys.executable]
1353 return gethgcmd()
1354
1355 def rundetached(args, condfn):
1356 """Execute the argument list in a detached process.
1357
1358 condfn is a callable which is called repeatedly and should return
1359 True once the child process is known to have started successfully.
1360 At this point, the child process PID is returned. If the child
1361 process fails to start or finishes before condfn() evaluates to
1362 True, return -1.
1363 """
1364 # Windows case is easier because the child process is either
1365 # successfully starting and validating the condition or exiting
1366 # on failure. We just poll on its PID. On Unix, if the child
1367 # process fails to start, it will be left in a zombie state until
1368 # the parent waits on it, which we cannot do since we expect a long
1369 # running process on success. Instead we listen for SIGCHLD telling
1370 # us our child process terminated.
1371 terminated = set()
1372 def handler(signum, frame):
1373 terminated.add(os.wait())
1374 prevhandler = None
1375 if hasattr(signal, 'SIGCHLD'):
1376 prevhandler = signal.signal(signal.SIGCHLD, handler)
1377 try:
1378 pid = spawndetached(args)
1379 while not condfn():
1380 if ((pid in terminated or not testpid(pid))
1381 and not condfn()):
1382 return -1
1383 time.sleep(0.1)
1384 return pid
1385 finally:
1386 if prevhandler is not None:
1387 signal.signal(signal.SIGCHLD, prevhandler)
1388
1389 try:
1390 any, all = any, all
1391 except NameError:
1392 def any(iterable):
1393 for i in iterable:
1394 if i:
1395 return True
1396 return False
1397
1398 def all(iterable):
1399 for i in iterable:
1400 if not i:
1401 return False
1402 return True
1403
1404 def termwidth():
1405 if 'COLUMNS' in os.environ:
1406 try:
1407 return int(os.environ['COLUMNS'])
1408 except ValueError:
1409 pass
1410 return termwidth_()
1411
1412 def interpolate(prefix, mapping, s, fn=None):
1413 """Return the result of interpolating items in the mapping into string s.
1414
1415 prefix is a single character string, or a two character string with
1416 a backslash as the first character if the prefix needs to be escaped in
1417 a regular expression.
1418
1419 fn is an optional function that will be applied to the replacement text
1420 just before replacement.
1421 """
1422 fn = fn or (lambda s: s)
1423 r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys())))
1424 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1425
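A short usage sketch of the new helper (editor's addition; note that the '$' prefix must be passed pre-escaped, as the docstring above describes):

from mercurial import util

vals = {'user': 'alice', 'rev': '42'}
print util.interpolate(r'\$', vals, 'committed $rev by $user')
# -> 'committed 42 by alice'
print util.interpolate(r'\$', vals, '$user', fn=lambda s: s.upper())
# -> 'ALICE'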