move os_rcpath from util to scmutil
Adrian Buehlmann
r13985:26335a81 default
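This changeset moves the os_rcpath() helper (the default OS-specific hgrc search path) from util.py into scmutil.py, and rcpath() in scmutil now calls the local os_rcpath() instead of util.os_rcpath(). A minimal caller-side sketch of the effect, using a hypothetical external caller that is not part of this changeset:

    # Before this change, a hypothetical caller went through util:
    #     from mercurial import util
    #     paths = util.os_rcpath()
    # After it, the same helper lives in scmutil:
    from mercurial import scmutil
    paths = scmutil.os_rcpath()   # default os-specific hgrc search path
    print(paths)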
@@ -1,323 +1,330 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error, osutil
9 import util, error, osutil
10 import os, errno, stat
10 import os, errno, stat
11
11
12 def checkfilename(f):
12 def checkfilename(f):
13 '''Check that the filename f is an acceptable filename for a tracked file'''
13 '''Check that the filename f is an acceptable filename for a tracked file'''
14 if '\r' in f or '\n' in f:
14 if '\r' in f or '\n' in f:
15 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
15 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
16
16
17 def checkportable(ui, f):
17 def checkportable(ui, f):
18 '''Check if filename f is portable and warn or abort depending on config'''
18 '''Check if filename f is portable and warn or abort depending on config'''
19 checkfilename(f)
19 checkfilename(f)
20 val = ui.config('ui', 'portablefilenames', 'warn')
20 val = ui.config('ui', 'portablefilenames', 'warn')
21 lval = val.lower()
21 lval = val.lower()
22 abort = os.name == 'nt' or lval == 'abort'
22 abort = os.name == 'nt' or lval == 'abort'
23 bval = util.parsebool(val)
23 bval = util.parsebool(val)
24 if abort or lval == 'warn' or bval:
24 if abort or lval == 'warn' or bval:
25 msg = util.checkwinfilename(f)
25 msg = util.checkwinfilename(f)
26 if msg:
26 if msg:
27 if abort:
27 if abort:
28 raise util.Abort("%s: %r" % (msg, f))
28 raise util.Abort("%s: %r" % (msg, f))
29 ui.warn(_("warning: %s: %r\n") % (msg, f))
29 ui.warn(_("warning: %s: %r\n") % (msg, f))
30 elif bval is None and lval != 'ignore':
30 elif bval is None and lval != 'ignore':
31 raise error.ConfigError(
31 raise error.ConfigError(
32 _("ui.portablefilenames value is invalid ('%s')") % val)
32 _("ui.portablefilenames value is invalid ('%s')") % val)
33
33
34 class path_auditor(object):
34 class path_auditor(object):
35 '''ensure that a filesystem path contains no banned components.
35 '''ensure that a filesystem path contains no banned components.
36 the following properties of a path are checked:
36 the following properties of a path are checked:
37
37
38 - ends with a directory separator
38 - ends with a directory separator
39 - under top-level .hg
39 - under top-level .hg
40 - starts at the root of a windows drive
40 - starts at the root of a windows drive
41 - contains ".."
41 - contains ".."
42 - traverses a symlink (e.g. a/symlink_here/b)
42 - traverses a symlink (e.g. a/symlink_here/b)
43 - inside a nested repository (a callback can be used to approve
43 - inside a nested repository (a callback can be used to approve
44 some nested repositories, e.g., subrepositories)
44 some nested repositories, e.g., subrepositories)
45 '''
45 '''
46
46
47 def __init__(self, root, callback=None):
47 def __init__(self, root, callback=None):
48 self.audited = set()
48 self.audited = set()
49 self.auditeddir = set()
49 self.auditeddir = set()
50 self.root = root
50 self.root = root
51 self.callback = callback
51 self.callback = callback
52
52
53 def __call__(self, path):
53 def __call__(self, path):
54 '''Check the relative path.
54 '''Check the relative path.
55 path may contain a pattern (e.g. foodir/**.txt)'''
55 path may contain a pattern (e.g. foodir/**.txt)'''
56
56
57 if path in self.audited:
57 if path in self.audited:
58 return
58 return
59 # AIX ignores "/" at end of path, others raise EISDIR.
59 # AIX ignores "/" at end of path, others raise EISDIR.
60 if util.endswithsep(path):
60 if util.endswithsep(path):
61 raise util.Abort(_("path ends in directory separator: %s") % path)
61 raise util.Abort(_("path ends in directory separator: %s") % path)
62 normpath = os.path.normcase(path)
62 normpath = os.path.normcase(path)
63 parts = util.splitpath(normpath)
63 parts = util.splitpath(normpath)
64 if (os.path.splitdrive(path)[0]
64 if (os.path.splitdrive(path)[0]
65 or parts[0].lower() in ('.hg', '.hg.', '')
65 or parts[0].lower() in ('.hg', '.hg.', '')
66 or os.pardir in parts):
66 or os.pardir in parts):
67 raise util.Abort(_("path contains illegal component: %s") % path)
67 raise util.Abort(_("path contains illegal component: %s") % path)
68 if '.hg' in path.lower():
68 if '.hg' in path.lower():
69 lparts = [p.lower() for p in parts]
69 lparts = [p.lower() for p in parts]
70 for p in '.hg', '.hg.':
70 for p in '.hg', '.hg.':
71 if p in lparts[1:]:
71 if p in lparts[1:]:
72 pos = lparts.index(p)
72 pos = lparts.index(p)
73 base = os.path.join(*parts[:pos])
73 base = os.path.join(*parts[:pos])
74 raise util.Abort(_('path %r is inside nested repo %r')
74 raise util.Abort(_('path %r is inside nested repo %r')
75 % (path, base))
75 % (path, base))
76
76
77 parts.pop()
77 parts.pop()
78 prefixes = []
78 prefixes = []
79 while parts:
79 while parts:
80 prefix = os.sep.join(parts)
80 prefix = os.sep.join(parts)
81 if prefix in self.auditeddir:
81 if prefix in self.auditeddir:
82 break
82 break
83 curpath = os.path.join(self.root, prefix)
83 curpath = os.path.join(self.root, prefix)
84 try:
84 try:
85 st = os.lstat(curpath)
85 st = os.lstat(curpath)
86 except OSError, err:
86 except OSError, err:
87 # EINVAL can be raised as invalid path syntax under win32.
87 # EINVAL can be raised as invalid path syntax under win32.
88 # They must be ignored so that patterns can be checked too.
88 # They must be ignored so that patterns can be checked too.
89 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
89 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
90 raise
90 raise
91 else:
91 else:
92 if stat.S_ISLNK(st.st_mode):
92 if stat.S_ISLNK(st.st_mode):
93 raise util.Abort(
93 raise util.Abort(
94 _('path %r traverses symbolic link %r')
94 _('path %r traverses symbolic link %r')
95 % (path, prefix))
95 % (path, prefix))
96 elif (stat.S_ISDIR(st.st_mode) and
96 elif (stat.S_ISDIR(st.st_mode) and
97 os.path.isdir(os.path.join(curpath, '.hg'))):
97 os.path.isdir(os.path.join(curpath, '.hg'))):
98 if not self.callback or not self.callback(curpath):
98 if not self.callback or not self.callback(curpath):
99 raise util.Abort(_('path %r is inside nested repo %r') %
99 raise util.Abort(_('path %r is inside nested repo %r') %
100 (path, prefix))
100 (path, prefix))
101 prefixes.append(prefix)
101 prefixes.append(prefix)
102 parts.pop()
102 parts.pop()
103
103
104 self.audited.add(path)
104 self.audited.add(path)
105 # only add prefixes to the cache after checking everything: we don't
105 # only add prefixes to the cache after checking everything: we don't
106 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
106 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
107 self.auditeddir.update(prefixes)
107 self.auditeddir.update(prefixes)
108
108
109 class opener(object):
109 class opener(object):
110 '''Open files relative to a base directory
110 '''Open files relative to a base directory
111
111
112 This class is used to hide the details of COW semantics and
112 This class is used to hide the details of COW semantics and
113 remote file access from higher level code.
113 remote file access from higher level code.
114 '''
114 '''
115 def __init__(self, base, audit=True):
115 def __init__(self, base, audit=True):
116 self.base = base
116 self.base = base
117 if audit:
117 if audit:
118 self.auditor = path_auditor(base)
118 self.auditor = path_auditor(base)
119 else:
119 else:
120 self.auditor = util.always
120 self.auditor = util.always
121 self.createmode = None
121 self.createmode = None
122 self._trustnlink = None
122 self._trustnlink = None
123
123
124 @util.propertycache
124 @util.propertycache
125 def _can_symlink(self):
125 def _can_symlink(self):
126 return util.checklink(self.base)
126 return util.checklink(self.base)
127
127
128 def _fixfilemode(self, name):
128 def _fixfilemode(self, name):
129 if self.createmode is None:
129 if self.createmode is None:
130 return
130 return
131 os.chmod(name, self.createmode & 0666)
131 os.chmod(name, self.createmode & 0666)
132
132
133 def __call__(self, path, mode="r", text=False, atomictemp=False):
133 def __call__(self, path, mode="r", text=False, atomictemp=False):
134 r = util.checkosfilename(path)
134 r = util.checkosfilename(path)
135 if r:
135 if r:
136 raise util.Abort("%s: %r" % (r, path))
136 raise util.Abort("%s: %r" % (r, path))
137 self.auditor(path)
137 self.auditor(path)
138 f = os.path.join(self.base, path)
138 f = os.path.join(self.base, path)
139
139
140 if not text and "b" not in mode:
140 if not text and "b" not in mode:
141 mode += "b" # for that other OS
141 mode += "b" # for that other OS
142
142
143 nlink = -1
143 nlink = -1
144 dirname, basename = os.path.split(f)
144 dirname, basename = os.path.split(f)
145 # If basename is empty, then the path is malformed because it points
145 # If basename is empty, then the path is malformed because it points
146 # to a directory. Let the posixfile() call below raise IOError.
146 # to a directory. Let the posixfile() call below raise IOError.
147 if basename and mode not in ('r', 'rb'):
147 if basename and mode not in ('r', 'rb'):
148 if atomictemp:
148 if atomictemp:
149 if not os.path.isdir(dirname):
149 if not os.path.isdir(dirname):
150 util.makedirs(dirname, self.createmode)
150 util.makedirs(dirname, self.createmode)
151 return util.atomictempfile(f, mode, self.createmode)
151 return util.atomictempfile(f, mode, self.createmode)
152 try:
152 try:
153 if 'w' in mode:
153 if 'w' in mode:
154 util.unlink(f)
154 util.unlink(f)
155 nlink = 0
155 nlink = 0
156 else:
156 else:
157 # nlinks() may behave differently for files on Windows
157 # nlinks() may behave differently for files on Windows
158 # shares if the file is open.
158 # shares if the file is open.
159 fd = util.posixfile(f)
159 fd = util.posixfile(f)
160 nlink = util.nlinks(f)
160 nlink = util.nlinks(f)
161 if nlink < 1:
161 if nlink < 1:
162 nlink = 2 # force mktempcopy (issue1922)
162 nlink = 2 # force mktempcopy (issue1922)
163 fd.close()
163 fd.close()
164 except (OSError, IOError), e:
164 except (OSError, IOError), e:
165 if e.errno != errno.ENOENT:
165 if e.errno != errno.ENOENT:
166 raise
166 raise
167 nlink = 0
167 nlink = 0
168 if not os.path.isdir(dirname):
168 if not os.path.isdir(dirname):
169 util.makedirs(dirname, self.createmode)
169 util.makedirs(dirname, self.createmode)
170 if nlink > 0:
170 if nlink > 0:
171 if self._trustnlink is None:
171 if self._trustnlink is None:
172 self._trustnlink = nlink > 1 or util.checknlink(f)
172 self._trustnlink = nlink > 1 or util.checknlink(f)
173 if nlink > 1 or not self._trustnlink:
173 if nlink > 1 or not self._trustnlink:
174 util.rename(util.mktempcopy(f), f)
174 util.rename(util.mktempcopy(f), f)
175 fp = util.posixfile(f, mode)
175 fp = util.posixfile(f, mode)
176 if nlink == 0:
176 if nlink == 0:
177 self._fixfilemode(f)
177 self._fixfilemode(f)
178 return fp
178 return fp
179
179
180 def symlink(self, src, dst):
180 def symlink(self, src, dst):
181 self.auditor(dst)
181 self.auditor(dst)
182 linkname = os.path.join(self.base, dst)
182 linkname = os.path.join(self.base, dst)
183 try:
183 try:
184 os.unlink(linkname)
184 os.unlink(linkname)
185 except OSError:
185 except OSError:
186 pass
186 pass
187
187
188 dirname = os.path.dirname(linkname)
188 dirname = os.path.dirname(linkname)
189 if not os.path.exists(dirname):
189 if not os.path.exists(dirname):
190 util.makedirs(dirname, self.createmode)
190 util.makedirs(dirname, self.createmode)
191
191
192 if self._can_symlink:
192 if self._can_symlink:
193 try:
193 try:
194 os.symlink(src, linkname)
194 os.symlink(src, linkname)
195 except OSError, err:
195 except OSError, err:
196 raise OSError(err.errno, _('could not symlink to %r: %s') %
196 raise OSError(err.errno, _('could not symlink to %r: %s') %
197 (src, err.strerror), linkname)
197 (src, err.strerror), linkname)
198 else:
198 else:
199 f = self(dst, "w")
199 f = self(dst, "w")
200 f.write(src)
200 f.write(src)
201 f.close()
201 f.close()
202 self._fixfilemode(dst)
202 self._fixfilemode(dst)
203
203
204 def canonpath(root, cwd, myname, auditor=None):
204 def canonpath(root, cwd, myname, auditor=None):
205 '''return the canonical path of myname, given cwd and root'''
205 '''return the canonical path of myname, given cwd and root'''
206 if util.endswithsep(root):
206 if util.endswithsep(root):
207 rootsep = root
207 rootsep = root
208 else:
208 else:
209 rootsep = root + os.sep
209 rootsep = root + os.sep
210 name = myname
210 name = myname
211 if not os.path.isabs(name):
211 if not os.path.isabs(name):
212 name = os.path.join(root, cwd, name)
212 name = os.path.join(root, cwd, name)
213 name = os.path.normpath(name)
213 name = os.path.normpath(name)
214 if auditor is None:
214 if auditor is None:
215 auditor = path_auditor(root)
215 auditor = path_auditor(root)
216 if name != rootsep and name.startswith(rootsep):
216 if name != rootsep and name.startswith(rootsep):
217 name = name[len(rootsep):]
217 name = name[len(rootsep):]
218 auditor(name)
218 auditor(name)
219 return util.pconvert(name)
219 return util.pconvert(name)
220 elif name == root:
220 elif name == root:
221 return ''
221 return ''
222 else:
222 else:
223 # Determine whether `name' is in the hierarchy at or beneath `root',
223 # Determine whether `name' is in the hierarchy at or beneath `root',
224 # by iterating name=dirname(name) until that causes no change (can't
224 # by iterating name=dirname(name) until that causes no change (can't
225 # check name == '/', because that doesn't work on windows). For each
225 # check name == '/', because that doesn't work on windows). For each
226 # `name', compare dev/inode numbers. If they match, the list `rel'
226 # `name', compare dev/inode numbers. If they match, the list `rel'
227 # holds the reversed list of components making up the relative file
227 # holds the reversed list of components making up the relative file
228 # name we want.
228 # name we want.
229 root_st = os.stat(root)
229 root_st = os.stat(root)
230 rel = []
230 rel = []
231 while True:
231 while True:
232 try:
232 try:
233 name_st = os.stat(name)
233 name_st = os.stat(name)
234 except OSError:
234 except OSError:
235 break
235 break
236 if util.samestat(name_st, root_st):
236 if util.samestat(name_st, root_st):
237 if not rel:
237 if not rel:
238 # name was actually the same as root (maybe a symlink)
238 # name was actually the same as root (maybe a symlink)
239 return ''
239 return ''
240 rel.reverse()
240 rel.reverse()
241 name = os.path.join(*rel)
241 name = os.path.join(*rel)
242 auditor(name)
242 auditor(name)
243 return util.pconvert(name)
243 return util.pconvert(name)
244 dirname, basename = os.path.split(name)
244 dirname, basename = os.path.split(name)
245 rel.append(basename)
245 rel.append(basename)
246 if dirname == name:
246 if dirname == name:
247 break
247 break
248 name = dirname
248 name = dirname
249
249
250 raise util.Abort('%s not under root' % myname)
250 raise util.Abort('%s not under root' % myname)
251
251
252 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
252 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
253 '''yield every hg repository under path, recursively.'''
253 '''yield every hg repository under path, recursively.'''
254 def errhandler(err):
254 def errhandler(err):
255 if err.filename == path:
255 if err.filename == path:
256 raise err
256 raise err
257 if followsym and hasattr(os.path, 'samestat'):
257 if followsym and hasattr(os.path, 'samestat'):
258 def _add_dir_if_not_there(dirlst, dirname):
258 def _add_dir_if_not_there(dirlst, dirname):
259 match = False
259 match = False
260 samestat = os.path.samestat
260 samestat = os.path.samestat
261 dirstat = os.stat(dirname)
261 dirstat = os.stat(dirname)
262 for lstdirstat in dirlst:
262 for lstdirstat in dirlst:
263 if samestat(dirstat, lstdirstat):
263 if samestat(dirstat, lstdirstat):
264 match = True
264 match = True
265 break
265 break
266 if not match:
266 if not match:
267 dirlst.append(dirstat)
267 dirlst.append(dirstat)
268 return not match
268 return not match
269 else:
269 else:
270 followsym = False
270 followsym = False
271
271
272 if (seen_dirs is None) and followsym:
272 if (seen_dirs is None) and followsym:
273 seen_dirs = []
273 seen_dirs = []
274 _add_dir_if_not_there(seen_dirs, path)
274 _add_dir_if_not_there(seen_dirs, path)
275 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
275 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
276 dirs.sort()
276 dirs.sort()
277 if '.hg' in dirs:
277 if '.hg' in dirs:
278 yield root # found a repository
278 yield root # found a repository
279 qroot = os.path.join(root, '.hg', 'patches')
279 qroot = os.path.join(root, '.hg', 'patches')
280 if os.path.isdir(os.path.join(qroot, '.hg')):
280 if os.path.isdir(os.path.join(qroot, '.hg')):
281 yield qroot # we have a patch queue repo here
281 yield qroot # we have a patch queue repo here
282 if recurse:
282 if recurse:
283 # avoid recursing inside the .hg directory
283 # avoid recursing inside the .hg directory
284 dirs.remove('.hg')
284 dirs.remove('.hg')
285 else:
285 else:
286 dirs[:] = [] # don't descend further
286 dirs[:] = [] # don't descend further
287 elif followsym:
287 elif followsym:
288 newdirs = []
288 newdirs = []
289 for d in dirs:
289 for d in dirs:
290 fname = os.path.join(root, d)
290 fname = os.path.join(root, d)
291 if _add_dir_if_not_there(seen_dirs, fname):
291 if _add_dir_if_not_there(seen_dirs, fname):
292 if os.path.islink(fname):
292 if os.path.islink(fname):
293 for hgname in walkrepos(fname, True, seen_dirs):
293 for hgname in walkrepos(fname, True, seen_dirs):
294 yield hgname
294 yield hgname
295 else:
295 else:
296 newdirs.append(d)
296 newdirs.append(d)
297 dirs[:] = newdirs
297 dirs[:] = newdirs
298
298
299 def os_rcpath():
300 '''return default os-specific hgrc search path'''
301 path = util.system_rcpath()
302 path.extend(util.user_rcpath())
303 path = [os.path.normpath(f) for f in path]
304 return path
305
299 _rcpath = None
306 _rcpath = None
300
307
301 def rcpath():
308 def rcpath():
302 '''return hgrc search path. if env var HGRCPATH is set, use it.
309 '''return hgrc search path. if env var HGRCPATH is set, use it.
303 for each item in path, if directory, use files ending in .rc,
310 for each item in path, if directory, use files ending in .rc,
304 else use item.
311 else use item.
305 make HGRCPATH empty to only look in .hg/hgrc of current repo.
312 make HGRCPATH empty to only look in .hg/hgrc of current repo.
306 if no HGRCPATH, use default os-specific path.'''
313 if no HGRCPATH, use default os-specific path.'''
307 global _rcpath
314 global _rcpath
308 if _rcpath is None:
315 if _rcpath is None:
309 if 'HGRCPATH' in os.environ:
316 if 'HGRCPATH' in os.environ:
310 _rcpath = []
317 _rcpath = []
311 for p in os.environ['HGRCPATH'].split(os.pathsep):
318 for p in os.environ['HGRCPATH'].split(os.pathsep):
312 if not p:
319 if not p:
313 continue
320 continue
314 p = util.expandpath(p)
321 p = util.expandpath(p)
315 if os.path.isdir(p):
322 if os.path.isdir(p):
316 for f, kind in osutil.listdir(p):
323 for f, kind in osutil.listdir(p):
317 if f.endswith('.rc'):
324 if f.endswith('.rc'):
318 _rcpath.append(os.path.join(p, f))
325 _rcpath.append(os.path.join(p, f))
319 else:
326 else:
320 _rcpath.append(p)
327 _rcpath.append(p)
321 else:
328 else:
322 _rcpath = util.os_rcpath()
329 _rcpath = os_rcpath()
323 return _rcpath
330 return _rcpath
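For reference, the HGRCPATH handling that rcpath() documents above can be paraphrased as a small standalone sketch. This is a simplified re-statement, not the code Mercurial runs: it substitutes os.listdir() and os.path.expanduser()/expandvars() for osutil.listdir() and util.expandpath(), and sorts directory entries for determinism.

    import os

    def expandrcpath(hgrcpath):
        '''Rough paraphrase of how rcpath() expands an HGRCPATH value:
        empty entries are skipped, directories contribute their *.rc files,
        and any other entry is used as-is.'''
        result = []
        for p in hgrcpath.split(os.pathsep):
            if not p:
                continue
            p = os.path.expanduser(os.path.expandvars(p))
            if os.path.isdir(p):
                result.extend(os.path.join(p, f)
                              for f in sorted(os.listdir(p))
                              if f.endswith('.rc'))
            else:
                result.append(p)
        return result

    # e.g. expandrcpath('/etc/mercurial' + os.pathsep + '~/extra.rc')
    # lists /etc/mercurial/*.rc followed by the expanded ~/extra.rc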
@@ -1,1290 +1,1283 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, calendar, textwrap, unicodedata, signal
19 import os, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 fp = open(outname, 'rb')
201 fp = open(outname, 'rb')
202 r = fp.read()
202 r = fp.read()
203 fp.close()
203 fp.close()
204 return r
204 return r
205 finally:
205 finally:
206 try:
206 try:
207 if inname:
207 if inname:
208 os.unlink(inname)
208 os.unlink(inname)
209 except:
209 except:
210 pass
210 pass
211 try:
211 try:
212 if outname:
212 if outname:
213 os.unlink(outname)
213 os.unlink(outname)
214 except:
214 except:
215 pass
215 pass
216
216
217 filtertable = {
217 filtertable = {
218 'tempfile:': tempfilter,
218 'tempfile:': tempfilter,
219 'pipe:': pipefilter,
219 'pipe:': pipefilter,
220 }
220 }
221
221
222 def filter(s, cmd):
222 def filter(s, cmd):
223 "filter a string through a command that transforms its input to its output"
223 "filter a string through a command that transforms its input to its output"
224 for name, fn in filtertable.iteritems():
224 for name, fn in filtertable.iteritems():
225 if cmd.startswith(name):
225 if cmd.startswith(name):
226 return fn(s, cmd[len(name):].lstrip())
226 return fn(s, cmd[len(name):].lstrip())
227 return pipefilter(s, cmd)
227 return pipefilter(s, cmd)
228
228
229 def binary(s):
229 def binary(s):
230 """return true if a string is binary data"""
230 """return true if a string is binary data"""
231 return bool(s and '\0' in s)
231 return bool(s and '\0' in s)
232
232
233 def increasingchunks(source, min=1024, max=65536):
233 def increasingchunks(source, min=1024, max=65536):
234 '''return no less than min bytes per chunk while data remains,
234 '''return no less than min bytes per chunk while data remains,
235 doubling min after each chunk until it reaches max'''
235 doubling min after each chunk until it reaches max'''
236 def log2(x):
236 def log2(x):
237 if not x:
237 if not x:
238 return 0
238 return 0
239 i = 0
239 i = 0
240 while x:
240 while x:
241 x >>= 1
241 x >>= 1
242 i += 1
242 i += 1
243 return i - 1
243 return i - 1
244
244
245 buf = []
245 buf = []
246 blen = 0
246 blen = 0
247 for chunk in source:
247 for chunk in source:
248 buf.append(chunk)
248 buf.append(chunk)
249 blen += len(chunk)
249 blen += len(chunk)
250 if blen >= min:
250 if blen >= min:
251 if min < max:
251 if min < max:
252 min = min << 1
252 min = min << 1
253 nmin = 1 << log2(blen)
253 nmin = 1 << log2(blen)
254 if nmin > min:
254 if nmin > min:
255 min = nmin
255 min = nmin
256 if min > max:
256 if min > max:
257 min = max
257 min = max
258 yield ''.join(buf)
258 yield ''.join(buf)
259 blen = 0
259 blen = 0
260 buf = []
260 buf = []
261 if buf:
261 if buf:
262 yield ''.join(buf)
262 yield ''.join(buf)
263
263
264 Abort = error.Abort
264 Abort = error.Abort
265
265
266 def always(fn):
266 def always(fn):
267 return True
267 return True
268
268
269 def never(fn):
269 def never(fn):
270 return False
270 return False
271
271
272 def pathto(root, n1, n2):
272 def pathto(root, n1, n2):
273 '''return the relative path from one place to another.
273 '''return the relative path from one place to another.
274 root should use os.sep to separate directories
274 root should use os.sep to separate directories
275 n1 should use os.sep to separate directories
275 n1 should use os.sep to separate directories
276 n2 should use "/" to separate directories
276 n2 should use "/" to separate directories
277 returns an os.sep-separated path.
277 returns an os.sep-separated path.
278
278
279 If n1 is a relative path, it's assumed it's
279 If n1 is a relative path, it's assumed it's
280 relative to root.
280 relative to root.
281 n2 should always be relative to root.
281 n2 should always be relative to root.
282 '''
282 '''
283 if not n1:
283 if not n1:
284 return localpath(n2)
284 return localpath(n2)
285 if os.path.isabs(n1):
285 if os.path.isabs(n1):
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
287 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
288 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
289 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
290 a.reverse()
290 a.reverse()
291 b.reverse()
291 b.reverse()
292 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
293 a.pop()
293 a.pop()
294 b.pop()
294 b.pop()
295 b.reverse()
295 b.reverse()
296 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
297
297
298 _hgexecutable = None
298 _hgexecutable = None
299
299
300 def main_is_frozen():
300 def main_is_frozen():
301 """return True if we are a frozen executable.
301 """return True if we are a frozen executable.
302
302
303 The code supports py2exe (most common, Windows only) and tools/freeze
303 The code supports py2exe (most common, Windows only) and tools/freeze
304 (portable, not much used).
304 (portable, not much used).
305 """
305 """
306 return (hasattr(sys, "frozen") or # new py2exe
306 return (hasattr(sys, "frozen") or # new py2exe
307 hasattr(sys, "importers") or # old py2exe
307 hasattr(sys, "importers") or # old py2exe
308 imp.is_frozen("__main__")) # tools/freeze
308 imp.is_frozen("__main__")) # tools/freeze
309
309
310 def hgexecutable():
310 def hgexecutable():
311 """return location of the 'hg' executable.
311 """return location of the 'hg' executable.
312
312
313 Defaults to $HG or 'hg' in the search path.
313 Defaults to $HG or 'hg' in the search path.
314 """
314 """
315 if _hgexecutable is None:
315 if _hgexecutable is None:
316 hg = os.environ.get('HG')
316 hg = os.environ.get('HG')
317 if hg:
317 if hg:
318 set_hgexecutable(hg)
318 set_hgexecutable(hg)
319 elif main_is_frozen():
319 elif main_is_frozen():
320 set_hgexecutable(sys.executable)
320 set_hgexecutable(sys.executable)
321 else:
321 else:
322 exe = find_exe('hg') or os.path.basename(sys.argv[0])
322 exe = find_exe('hg') or os.path.basename(sys.argv[0])
323 set_hgexecutable(exe)
323 set_hgexecutable(exe)
324 return _hgexecutable
324 return _hgexecutable
325
325
326 def set_hgexecutable(path):
326 def set_hgexecutable(path):
327 """set location of the 'hg' executable"""
327 """set location of the 'hg' executable"""
328 global _hgexecutable
328 global _hgexecutable
329 _hgexecutable = path
329 _hgexecutable = path
330
330
331 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
331 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
332 '''enhanced shell command execution.
332 '''enhanced shell command execution.
333 run with environment maybe modified, maybe in different dir.
333 run with environment maybe modified, maybe in different dir.
334
334
335 if command fails and onerr is None, return status. if ui object,
335 if command fails and onerr is None, return status. if ui object,
336 print error message and return status, else raise onerr object as
336 print error message and return status, else raise onerr object as
337 exception.
337 exception.
338
338
339 if out is specified, it is assumed to be a file-like object that has a
339 if out is specified, it is assumed to be a file-like object that has a
340 write() method. stdout and stderr will be redirected to out.'''
340 write() method. stdout and stderr will be redirected to out.'''
341 try:
341 try:
342 sys.stdout.flush()
342 sys.stdout.flush()
343 except Exception:
343 except Exception:
344 pass
344 pass
345 def py2shell(val):
345 def py2shell(val):
346 'convert python object into string that is useful to shell'
346 'convert python object into string that is useful to shell'
347 if val is None or val is False:
347 if val is None or val is False:
348 return '0'
348 return '0'
349 if val is True:
349 if val is True:
350 return '1'
350 return '1'
351 return str(val)
351 return str(val)
352 origcmd = cmd
352 origcmd = cmd
353 cmd = quotecommand(cmd)
353 cmd = quotecommand(cmd)
354 env = dict(os.environ)
354 env = dict(os.environ)
355 env.update((k, py2shell(v)) for k, v in environ.iteritems())
355 env.update((k, py2shell(v)) for k, v in environ.iteritems())
356 env['HG'] = hgexecutable()
356 env['HG'] = hgexecutable()
357 if out is None:
357 if out is None:
358 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
358 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
359 env=env, cwd=cwd)
359 env=env, cwd=cwd)
360 else:
360 else:
361 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
361 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
362 env=env, cwd=cwd, stdout=subprocess.PIPE,
362 env=env, cwd=cwd, stdout=subprocess.PIPE,
363 stderr=subprocess.STDOUT)
363 stderr=subprocess.STDOUT)
364 for line in proc.stdout:
364 for line in proc.stdout:
365 out.write(line)
365 out.write(line)
366 proc.wait()
366 proc.wait()
367 rc = proc.returncode
367 rc = proc.returncode
368 if sys.platform == 'OpenVMS' and rc & 1:
368 if sys.platform == 'OpenVMS' and rc & 1:
369 rc = 0
369 rc = 0
370 if rc and onerr:
370 if rc and onerr:
371 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
371 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
372 explain_exit(rc)[0])
372 explain_exit(rc)[0])
373 if errprefix:
373 if errprefix:
374 errmsg = '%s: %s' % (errprefix, errmsg)
374 errmsg = '%s: %s' % (errprefix, errmsg)
375 try:
375 try:
376 onerr.warn(errmsg + '\n')
376 onerr.warn(errmsg + '\n')
377 except AttributeError:
377 except AttributeError:
378 raise onerr(errmsg)
378 raise onerr(errmsg)
379 return rc
379 return rc
380
380
381 def checksignature(func):
381 def checksignature(func):
382 '''wrap a function with code to check for calling errors'''
382 '''wrap a function with code to check for calling errors'''
383 def check(*args, **kwargs):
383 def check(*args, **kwargs):
384 try:
384 try:
385 return func(*args, **kwargs)
385 return func(*args, **kwargs)
386 except TypeError:
386 except TypeError:
387 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
387 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
388 raise error.SignatureError
388 raise error.SignatureError
389 raise
389 raise
390
390
391 return check
391 return check
392
392
393 def makedir(path, notindexed):
393 def makedir(path, notindexed):
394 os.mkdir(path)
394 os.mkdir(path)
395
395
396 def unlinkpath(f):
396 def unlinkpath(f):
397 """unlink and remove the directory if it is empty"""
397 """unlink and remove the directory if it is empty"""
398 os.unlink(f)
398 os.unlink(f)
399 # try removing directories that might now be empty
399 # try removing directories that might now be empty
400 try:
400 try:
401 os.removedirs(os.path.dirname(f))
401 os.removedirs(os.path.dirname(f))
402 except OSError:
402 except OSError:
403 pass
403 pass
404
404
405 def copyfile(src, dest):
405 def copyfile(src, dest):
406 "copy a file, preserving mode and atime/mtime"
406 "copy a file, preserving mode and atime/mtime"
407 if os.path.islink(src):
407 if os.path.islink(src):
408 try:
408 try:
409 os.unlink(dest)
409 os.unlink(dest)
410 except:
410 except:
411 pass
411 pass
412 os.symlink(os.readlink(src), dest)
412 os.symlink(os.readlink(src), dest)
413 else:
413 else:
414 try:
414 try:
415 shutil.copyfile(src, dest)
415 shutil.copyfile(src, dest)
416 shutil.copymode(src, dest)
416 shutil.copymode(src, dest)
417 except shutil.Error, inst:
417 except shutil.Error, inst:
418 raise Abort(str(inst))
418 raise Abort(str(inst))
419
419
420 def copyfiles(src, dst, hardlink=None):
420 def copyfiles(src, dst, hardlink=None):
421 """Copy a directory tree using hardlinks if possible"""
421 """Copy a directory tree using hardlinks if possible"""
422
422
423 if hardlink is None:
423 if hardlink is None:
424 hardlink = (os.stat(src).st_dev ==
424 hardlink = (os.stat(src).st_dev ==
425 os.stat(os.path.dirname(dst)).st_dev)
425 os.stat(os.path.dirname(dst)).st_dev)
426
426
427 num = 0
427 num = 0
428 if os.path.isdir(src):
428 if os.path.isdir(src):
429 os.mkdir(dst)
429 os.mkdir(dst)
430 for name, kind in osutil.listdir(src):
430 for name, kind in osutil.listdir(src):
431 srcname = os.path.join(src, name)
431 srcname = os.path.join(src, name)
432 dstname = os.path.join(dst, name)
432 dstname = os.path.join(dst, name)
433 hardlink, n = copyfiles(srcname, dstname, hardlink)
433 hardlink, n = copyfiles(srcname, dstname, hardlink)
434 num += n
434 num += n
435 else:
435 else:
436 if hardlink:
436 if hardlink:
437 try:
437 try:
438 os_link(src, dst)
438 os_link(src, dst)
439 except (IOError, OSError):
439 except (IOError, OSError):
440 hardlink = False
440 hardlink = False
441 shutil.copy(src, dst)
441 shutil.copy(src, dst)
442 else:
442 else:
443 shutil.copy(src, dst)
443 shutil.copy(src, dst)
444 num += 1
444 num += 1
445
445
446 return hardlink, num
446 return hardlink, num
447
447
448 _windows_reserved_filenames = '''con prn aux nul
448 _windows_reserved_filenames = '''con prn aux nul
449 com1 com2 com3 com4 com5 com6 com7 com8 com9
449 com1 com2 com3 com4 com5 com6 com7 com8 com9
450 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
450 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
451 _windows_reserved_chars = ':*?"<>|'
451 _windows_reserved_chars = ':*?"<>|'
452 def checkwinfilename(path):
452 def checkwinfilename(path):
453 '''Check that the base-relative path is a valid filename on Windows.
453 '''Check that the base-relative path is a valid filename on Windows.
454 Returns None if the path is ok, or a UI string describing the problem.
454 Returns None if the path is ok, or a UI string describing the problem.
455
455
456 >>> checkwinfilename("just/a/normal/path")
456 >>> checkwinfilename("just/a/normal/path")
457 >>> checkwinfilename("foo/bar/con.xml")
457 >>> checkwinfilename("foo/bar/con.xml")
458 "filename contains 'con', which is reserved on Windows"
458 "filename contains 'con', which is reserved on Windows"
459 >>> checkwinfilename("foo/con.xml/bar")
459 >>> checkwinfilename("foo/con.xml/bar")
460 "filename contains 'con', which is reserved on Windows"
460 "filename contains 'con', which is reserved on Windows"
461 >>> checkwinfilename("foo/bar/xml.con")
461 >>> checkwinfilename("foo/bar/xml.con")
462 >>> checkwinfilename("foo/bar/AUX/bla.txt")
462 >>> checkwinfilename("foo/bar/AUX/bla.txt")
463 "filename contains 'AUX', which is reserved on Windows"
463 "filename contains 'AUX', which is reserved on Windows"
464 >>> checkwinfilename("foo/bar/bla:.txt")
464 >>> checkwinfilename("foo/bar/bla:.txt")
465 "filename contains ':', which is reserved on Windows"
465 "filename contains ':', which is reserved on Windows"
466 >>> checkwinfilename("foo/bar/b\07la.txt")
466 >>> checkwinfilename("foo/bar/b\07la.txt")
467 "filename contains '\\\\x07', which is invalid on Windows"
467 "filename contains '\\\\x07', which is invalid on Windows"
468 >>> checkwinfilename("foo/bar/bla ")
468 >>> checkwinfilename("foo/bar/bla ")
469 "filename ends with ' ', which is not allowed on Windows"
469 "filename ends with ' ', which is not allowed on Windows"
470 '''
470 '''
471 for n in path.replace('\\', '/').split('/'):
471 for n in path.replace('\\', '/').split('/'):
472 if not n:
472 if not n:
473 continue
473 continue
474 for c in n:
474 for c in n:
475 if c in _windows_reserved_chars:
475 if c in _windows_reserved_chars:
476 return _("filename contains '%s', which is reserved "
476 return _("filename contains '%s', which is reserved "
477 "on Windows") % c
477 "on Windows") % c
478 if ord(c) <= 31:
478 if ord(c) <= 31:
479 return _("filename contains %r, which is invalid "
479 return _("filename contains %r, which is invalid "
480 "on Windows") % c
480 "on Windows") % c
481 base = n.split('.')[0]
481 base = n.split('.')[0]
482 if base and base.lower() in _windows_reserved_filenames:
482 if base and base.lower() in _windows_reserved_filenames:
483 return _("filename contains '%s', which is reserved "
483 return _("filename contains '%s', which is reserved "
484 "on Windows") % base
484 "on Windows") % base
485 t = n[-1]
485 t = n[-1]
486 if t in '. ':
486 if t in '. ':
487 return _("filename ends with '%s', which is not allowed "
487 return _("filename ends with '%s', which is not allowed "
488 "on Windows") % t
488 "on Windows") % t
489
489
490 def lookup_reg(key, name=None, scope=None):
490 def lookup_reg(key, name=None, scope=None):
491 return None
491 return None
492
492
493 def hidewindow():
493 def hidewindow():
494 """Hide current shell window.
494 """Hide current shell window.
495
495
496 Used to hide the window opened when starting asynchronous
496 Used to hide the window opened when starting asynchronous
497 child process under Windows, unneeded on other systems.
497 child process under Windows, unneeded on other systems.
498 """
498 """
499 pass
499 pass
500
500
501 if os.name == 'nt':
501 if os.name == 'nt':
502 checkosfilename = checkwinfilename
502 checkosfilename = checkwinfilename
503 from windows import *
503 from windows import *
504 else:
504 else:
505 from posix import *
505 from posix import *
506
506
507 def makelock(info, pathname):
507 def makelock(info, pathname):
508 try:
508 try:
509 return os.symlink(info, pathname)
509 return os.symlink(info, pathname)
510 except OSError, why:
510 except OSError, why:
511 if why.errno == errno.EEXIST:
511 if why.errno == errno.EEXIST:
512 raise
512 raise
513 except AttributeError: # no symlink in os
513 except AttributeError: # no symlink in os
514 pass
514 pass
515
515
516 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
516 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
517 os.write(ld, info)
517 os.write(ld, info)
518 os.close(ld)
518 os.close(ld)
519
519
520 def readlock(pathname):
520 def readlock(pathname):
521 try:
521 try:
522 return os.readlink(pathname)
522 return os.readlink(pathname)
523 except OSError, why:
523 except OSError, why:
524 if why.errno not in (errno.EINVAL, errno.ENOSYS):
524 if why.errno not in (errno.EINVAL, errno.ENOSYS):
525 raise
525 raise
526 except AttributeError: # no symlink in os
526 except AttributeError: # no symlink in os
527 pass
527 pass
528 fp = posixfile(pathname)
528 fp = posixfile(pathname)
529 r = fp.read()
529 r = fp.read()
530 fp.close()
530 fp.close()
531 return r
531 return r
532
532
533 def fstat(fp):
533 def fstat(fp):
534 '''stat file object that may not have fileno method.'''
534 '''stat file object that may not have fileno method.'''
535 try:
535 try:
536 return os.fstat(fp.fileno())
536 return os.fstat(fp.fileno())
537 except AttributeError:
537 except AttributeError:
538 return os.stat(fp.name)
538 return os.stat(fp.name)
539
539
540 # File system features
540 # File system features
541
541
542 def checkcase(path):
542 def checkcase(path):
543 """
543 """
544 Check whether the given path is on a case-sensitive filesystem
544 Check whether the given path is on a case-sensitive filesystem
545
545
546 Requires a path (like /foo/.hg) ending with a foldable final
546 Requires a path (like /foo/.hg) ending with a foldable final
547 directory component.
547 directory component.
548 """
548 """
549 s1 = os.stat(path)
549 s1 = os.stat(path)
550 d, b = os.path.split(path)
550 d, b = os.path.split(path)
551 p2 = os.path.join(d, b.upper())
551 p2 = os.path.join(d, b.upper())
552 if path == p2:
552 if path == p2:
553 p2 = os.path.join(d, b.lower())
553 p2 = os.path.join(d, b.lower())
554 try:
554 try:
555 s2 = os.stat(p2)
555 s2 = os.stat(p2)
556 if s2 == s1:
556 if s2 == s1:
557 return False
557 return False
558 return True
558 return True
559 except:
559 except:
560 return True
560 return True
561
561
562 _fspathcache = {}
562 _fspathcache = {}
563 def fspath(name, root):
563 def fspath(name, root):
564 '''Get name in the case stored in the filesystem
564 '''Get name in the case stored in the filesystem
565
565
566 The name is either relative to root, or it is an absolute path starting
566 The name is either relative to root, or it is an absolute path starting
567 with root. Note that this function is unnecessary, and should not be
567 with root. Note that this function is unnecessary, and should not be
568 called, for case-sensitive filesystems (simply because it's expensive).
568 called, for case-sensitive filesystems (simply because it's expensive).
569 '''
569 '''
570 # If name is absolute, make it relative
570 # If name is absolute, make it relative
571 if name.lower().startswith(root.lower()):
571 if name.lower().startswith(root.lower()):
572 l = len(root)
572 l = len(root)
573 if name[l] == os.sep or name[l] == os.altsep:
573 if name[l] == os.sep or name[l] == os.altsep:
574 l = l + 1
574 l = l + 1
575 name = name[l:]
575 name = name[l:]
576
576
577 if not os.path.lexists(os.path.join(root, name)):
577 if not os.path.lexists(os.path.join(root, name)):
578 return None
578 return None
579
579
580 seps = os.sep
580 seps = os.sep
581 if os.altsep:
581 if os.altsep:
582 seps = seps + os.altsep
582 seps = seps + os.altsep
583 # Protect backslashes. This gets silly very quickly.
583 # Protect backslashes. This gets silly very quickly.
584 seps.replace('\\','\\\\')
584 seps.replace('\\','\\\\')
585 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
585 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
586 dir = os.path.normcase(os.path.normpath(root))
586 dir = os.path.normcase(os.path.normpath(root))
587 result = []
587 result = []
588 for part, sep in pattern.findall(name):
588 for part, sep in pattern.findall(name):
589 if sep:
589 if sep:
590 result.append(sep)
590 result.append(sep)
591 continue
591 continue
592
592
593 if dir not in _fspathcache:
593 if dir not in _fspathcache:
594 _fspathcache[dir] = os.listdir(dir)
594 _fspathcache[dir] = os.listdir(dir)
595 contents = _fspathcache[dir]
595 contents = _fspathcache[dir]
596
596
597 lpart = part.lower()
597 lpart = part.lower()
598 lenp = len(part)
598 lenp = len(part)
599 for n in contents:
599 for n in contents:
600 if lenp == len(n) and n.lower() == lpart:
600 if lenp == len(n) and n.lower() == lpart:
601 result.append(n)
601 result.append(n)
602 break
602 break
603 else:
603 else:
604 # Cannot happen, as the file exists!
604 # Cannot happen, as the file exists!
605 result.append(part)
605 result.append(part)
606 dir = os.path.join(dir, lpart)
606 dir = os.path.join(dir, lpart)
607
607
608 return ''.join(result)
608 return ''.join(result)
609
609
610 def checknlink(testfile):
610 def checknlink(testfile):
611 '''check whether hardlink count reporting works properly'''
611 '''check whether hardlink count reporting works properly'''
612
612
613 # testfile may be open, so we need a separate file for checking to
613 # testfile may be open, so we need a separate file for checking to
614 # work around issue2543 (or testfile may get lost on Samba shares)
614 # work around issue2543 (or testfile may get lost on Samba shares)
615 f1 = testfile + ".hgtmp1"
615 f1 = testfile + ".hgtmp1"
616 if os.path.lexists(f1):
616 if os.path.lexists(f1):
617 return False
617 return False
618 try:
618 try:
619 posixfile(f1, 'w').close()
619 posixfile(f1, 'w').close()
620 except IOError:
620 except IOError:
621 return False
621 return False
622
622
623 f2 = testfile + ".hgtmp2"
623 f2 = testfile + ".hgtmp2"
624 fd = None
624 fd = None
625 try:
625 try:
626 try:
626 try:
627 os_link(f1, f2)
627 os_link(f1, f2)
628 except OSError:
628 except OSError:
629 return False
629 return False
630
630
631 # nlinks() may behave differently for files on Windows shares if
631 # nlinks() may behave differently for files on Windows shares if
632 # the file is open.
632 # the file is open.
633 fd = posixfile(f2)
633 fd = posixfile(f2)
634 return nlinks(f2) > 1
634 return nlinks(f2) > 1
635 finally:
635 finally:
636 if fd is not None:
636 if fd is not None:
637 fd.close()
637 fd.close()
638 for f in (f1, f2):
638 for f in (f1, f2):
639 try:
639 try:
640 os.unlink(f)
640 os.unlink(f)
641 except OSError:
641 except OSError:
642 pass
642 pass
643
643
644 return False
644 return False
645
645
646 def endswithsep(path):
646 def endswithsep(path):
647 '''Check path ends with os.sep or os.altsep.'''
647 '''Check path ends with os.sep or os.altsep.'''
648 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
648 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
649
649
650 def splitpath(path):
650 def splitpath(path):
651 '''Split path by os.sep.
651 '''Split path by os.sep.
652 Note that this function does not use os.altsep because this is
652 Note that this function does not use os.altsep because this is
653 an alternative of simple "xxx.split(os.sep)".
653 an alternative of simple "xxx.split(os.sep)".
654 It is recommended to use os.path.normpath() before using this
654 It is recommended to use os.path.normpath() before using this
655 function if needed.'''
655 function if needed.'''
656 return path.split(os.sep)
656 return path.split(os.sep)
657
657
658 def gui():
658 def gui():
659 '''Are we running in a GUI?'''
659 '''Are we running in a GUI?'''
660 if sys.platform == 'darwin':
660 if sys.platform == 'darwin':
661 if 'SSH_CONNECTION' in os.environ:
661 if 'SSH_CONNECTION' in os.environ:
662 # handle SSH access to a box where the user is logged in
662 # handle SSH access to a box where the user is logged in
663 return False
663 return False
664 elif getattr(osutil, 'isgui', None):
664 elif getattr(osutil, 'isgui', None):
665 # check if a CoreGraphics session is available
665 # check if a CoreGraphics session is available
666 return osutil.isgui()
666 return osutil.isgui()
667 else:
667 else:
668 # pure build; use a safe default
668 # pure build; use a safe default
669 return True
669 return True
670 else:
670 else:
671 return os.name == "nt" or os.environ.get("DISPLAY")
671 return os.name == "nt" or os.environ.get("DISPLAY")
672
672
673 def mktempcopy(name, emptyok=False, createmode=None):
673 def mktempcopy(name, emptyok=False, createmode=None):
674 """Create a temporary file with the same contents as name
674 """Create a temporary file with the same contents as name
675
675
676 The permission bits are copied from the original file.
676 The permission bits are copied from the original file.
677
677
678 If the temporary file is going to be truncated immediately, you
678 If the temporary file is going to be truncated immediately, you
679 can use emptyok=True as an optimization.
679 can use emptyok=True as an optimization.
680
680
681 Returns the name of the temporary file.
681 Returns the name of the temporary file.
682 """
682 """
683 d, fn = os.path.split(name)
683 d, fn = os.path.split(name)
684 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
684 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
685 os.close(fd)
685 os.close(fd)
686 # Temporary files are created with mode 0600, which is usually not
686 # Temporary files are created with mode 0600, which is usually not
687 # what we want. If the original file already exists, just copy
687 # what we want. If the original file already exists, just copy
688 # its mode. Otherwise, manually obey umask.
688 # its mode. Otherwise, manually obey umask.
689 try:
689 try:
690 st_mode = os.lstat(name).st_mode & 0777
690 st_mode = os.lstat(name).st_mode & 0777
691 except OSError, inst:
691 except OSError, inst:
692 if inst.errno != errno.ENOENT:
692 if inst.errno != errno.ENOENT:
693 raise
693 raise
694 st_mode = createmode
694 st_mode = createmode
695 if st_mode is None:
695 if st_mode is None:
696 st_mode = ~umask
696 st_mode = ~umask
697 st_mode &= 0666
697 st_mode &= 0666
698 os.chmod(temp, st_mode)
698 os.chmod(temp, st_mode)
699 if emptyok:
699 if emptyok:
700 return temp
700 return temp
701 try:
701 try:
702 try:
702 try:
703 ifp = posixfile(name, "rb")
703 ifp = posixfile(name, "rb")
704 except IOError, inst:
704 except IOError, inst:
705 if inst.errno == errno.ENOENT:
705 if inst.errno == errno.ENOENT:
706 return temp
706 return temp
707 if not getattr(inst, 'filename', None):
707 if not getattr(inst, 'filename', None):
708 inst.filename = name
708 inst.filename = name
709 raise
709 raise
710 ofp = posixfile(temp, "wb")
710 ofp = posixfile(temp, "wb")
711 for chunk in filechunkiter(ifp):
711 for chunk in filechunkiter(ifp):
712 ofp.write(chunk)
712 ofp.write(chunk)
713 ifp.close()
713 ifp.close()
714 ofp.close()
714 ofp.close()
715 except:
715 except:
716 try: os.unlink(temp)
716 try: os.unlink(temp)
717 except: pass
717 except: pass
718 raise
718 raise
719 return temp
719 return temp
720
720
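# Editor's sketch (not part of this changeset): the typical mktempcopy()
# pattern, copy, modify, then rename over the original; the file name here is
# hypothetical and rename() refers to util's own rename helper.
#
#   tmp = mktempcopy('data.txt', emptyok=False)
#   fp = posixfile(tmp, 'ab')
#   fp.write('appended line\n')
#   fp.close()
#   rename(tmp, 'data.txt')   # replaces the original in one step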
721 class atomictempfile(object):
721 class atomictempfile(object):
722 """file-like object that atomically updates a file
722 """file-like object that atomically updates a file
723
723
724 All writes will be redirected to a temporary copy of the original
724 All writes will be redirected to a temporary copy of the original
725 file. When rename is called, the copy is renamed to the original
725 file. When rename is called, the copy is renamed to the original
726 name, making the changes visible.
726 name, making the changes visible.
727 """
727 """
728 def __init__(self, name, mode='w+b', createmode=None):
728 def __init__(self, name, mode='w+b', createmode=None):
729 self.__name = name
729 self.__name = name
730 self._fp = None
730 self._fp = None
731 self.temp = mktempcopy(name, emptyok=('w' in mode),
731 self.temp = mktempcopy(name, emptyok=('w' in mode),
732 createmode=createmode)
732 createmode=createmode)
733 self._fp = posixfile(self.temp, mode)
733 self._fp = posixfile(self.temp, mode)
734
734
735 def __getattr__(self, name):
735 def __getattr__(self, name):
736 return getattr(self._fp, name)
736 return getattr(self._fp, name)
737
737
738 def rename(self):
738 def rename(self):
739 if not self._fp.closed:
739 if not self._fp.closed:
740 self._fp.close()
740 self._fp.close()
741 rename(self.temp, localpath(self.__name))
741 rename(self.temp, localpath(self.__name))
742
742
743 def close(self):
743 def close(self):
744 if not self._fp:
744 if not self._fp:
745 return
745 return
746 if not self._fp.closed:
746 if not self._fp.closed:
747 try:
747 try:
748 os.unlink(self.temp)
748 os.unlink(self.temp)
749 except: pass
749 except: pass
750 self._fp.close()
750 self._fp.close()
751
751
752 def __del__(self):
752 def __del__(self):
753 self.close()
753 self.close()
754
754
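# Editor's sketch (not part of this changeset): the intended atomictempfile
# write pattern, rename() to publish the new contents, close() to discard
# them; 'somefile' is a made-up name.
#
#   af = atomictempfile('somefile', 'wb')
#   try:
#       af.write('new contents\n')
#       af.rename()          # replaces 'somefile' with the temporary copy
#   except:
#       af.close()           # discards the temporary copy
#       raise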
755 def makedirs(name, mode=None):
755 def makedirs(name, mode=None):
756 """recursive directory creation with parent mode inheritance"""
756 """recursive directory creation with parent mode inheritance"""
757 parent = os.path.abspath(os.path.dirname(name))
757 parent = os.path.abspath(os.path.dirname(name))
758 try:
758 try:
759 os.mkdir(name)
759 os.mkdir(name)
760 if mode is not None:
760 if mode is not None:
761 os.chmod(name, mode)
761 os.chmod(name, mode)
762 return
762 return
763 except OSError, err:
763 except OSError, err:
764 if err.errno == errno.EEXIST:
764 if err.errno == errno.EEXIST:
765 return
765 return
766 if not name or parent == name or err.errno != errno.ENOENT:
766 if not name or parent == name or err.errno != errno.ENOENT:
767 raise
767 raise
768 makedirs(parent, mode)
768 makedirs(parent, mode)
769 makedirs(name, mode)
769 makedirs(name, mode)
770
770
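# Editor's sketch (not part of this changeset): makedirs() behaves like
# os.makedirs() but applies 'mode' to every directory it creates and silently
# accepts directories that already exist.
#
#   makedirs('a/b/c', mode=0755)   # creates a, a/b and a/b/c as needed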
771 class chunkbuffer(object):
771 class chunkbuffer(object):
772 """Allow arbitrary sized chunks of data to be efficiently read from an
772 """Allow arbitrary sized chunks of data to be efficiently read from an
773 iterator over chunks of arbitrary size."""
773 iterator over chunks of arbitrary size."""
774
774
775 def __init__(self, in_iter):
775 def __init__(self, in_iter):
776 """in_iter is the iterator that's iterating over the input chunks.
776 """in_iter is the iterator that's iterating over the input chunks.
777 Chunks larger than 1 MB are split into 256 KB pieces internally."""
777 Chunks larger than 1 MB are split into 256 KB pieces internally."""
778 def splitbig(chunks):
778 def splitbig(chunks):
779 for chunk in chunks:
779 for chunk in chunks:
780 if len(chunk) > 2**20:
780 if len(chunk) > 2**20:
781 pos = 0
781 pos = 0
782 while pos < len(chunk):
782 while pos < len(chunk):
783 end = pos + 2 ** 18
783 end = pos + 2 ** 18
784 yield chunk[pos:end]
784 yield chunk[pos:end]
785 pos = end
785 pos = end
786 else:
786 else:
787 yield chunk
787 yield chunk
788 self.iter = splitbig(in_iter)
788 self.iter = splitbig(in_iter)
789 self._queue = []
789 self._queue = []
790
790
791 def read(self, l):
791 def read(self, l):
792 """Read L bytes of data from the iterator of chunks of data.
792 """Read L bytes of data from the iterator of chunks of data.
793 Returns less than L bytes if the iterator runs dry."""
793 Returns less than L bytes if the iterator runs dry."""
794 left = l
794 left = l
795 buf = ''
795 buf = ''
796 queue = self._queue
796 queue = self._queue
797 while left > 0:
797 while left > 0:
798 # refill the queue
798 # refill the queue
799 if not queue:
799 if not queue:
800 target = 2**18
800 target = 2**18
801 for chunk in self.iter:
801 for chunk in self.iter:
802 queue.append(chunk)
802 queue.append(chunk)
803 target -= len(chunk)
803 target -= len(chunk)
804 if target <= 0:
804 if target <= 0:
805 break
805 break
806 if not queue:
806 if not queue:
807 break
807 break
808
808
809 chunk = queue.pop(0)
809 chunk = queue.pop(0)
810 left -= len(chunk)
810 left -= len(chunk)
811 if left < 0:
811 if left < 0:
812 queue.insert(0, chunk[left:])
812 queue.insert(0, chunk[left:])
813 buf += chunk[:left]
813 buf += chunk[:left]
814 else:
814 else:
815 buf += chunk
815 buf += chunk
816
816
817 return buf
817 return buf
818
818
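# Editor's sketch (not part of this changeset): reading fixed-size records
# from an iterator that yields chunks of arbitrary size; outputs are what the
# code above suggests.
#
#   buf = chunkbuffer(iter(['abc', 'defgh', 'ij']))
#   buf.read(4)    # -> 'abcd'
#   buf.read(4)    # -> 'efgh'
#   buf.read(4)    # -> 'ij' (iterator ran dry, short read)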
819 def filechunkiter(f, size=65536, limit=None):
819 def filechunkiter(f, size=65536, limit=None):
820 """Create a generator that produces the data in the file, size
820 """Create a generator that produces the data in the file, size
821 (default 65536) bytes at a time, up to optional limit (default is
821 (default 65536) bytes at a time, up to optional limit (default is
822 to read all data). Chunks may be less than size bytes if the
822 to read all data). Chunks may be less than size bytes if the
823 chunk is the last chunk in the file, or the file is a socket or
823 chunk is the last chunk in the file, or the file is a socket or
824 some other type of file that sometimes reads less data than is
824 some other type of file that sometimes reads less data than is
825 requested."""
825 requested."""
826 assert size >= 0
826 assert size >= 0
827 assert limit is None or limit >= 0
827 assert limit is None or limit >= 0
828 while True:
828 while True:
829 if limit is None:
829 if limit is None:
830 nbytes = size
830 nbytes = size
831 else:
831 else:
832 nbytes = min(limit, size)
832 nbytes = min(limit, size)
833 s = nbytes and f.read(nbytes)
833 s = nbytes and f.read(nbytes)
834 if not s:
834 if not s:
835 break
835 break
836 if limit:
836 if limit:
837 limit -= len(s)
837 limit -= len(s)
838 yield s
838 yield s
839
839
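# Editor's sketch (not part of this changeset): copying at most 1 MB from an
# open file in 64k chunks; 'src' and 'dst' are hypothetical file objects.
#
#   for chunk in filechunkiter(src, size=65536, limit=2**20):
#       dst.write(chunk)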
840 def makedate():
840 def makedate():
841 lt = time.localtime()
841 lt = time.localtime()
842 if lt[8] == 1 and time.daylight:
842 if lt[8] == 1 and time.daylight:
843 tz = time.altzone
843 tz = time.altzone
844 else:
844 else:
845 tz = time.timezone
845 tz = time.timezone
846 t = time.mktime(lt)
846 t = time.mktime(lt)
847 if t < 0:
847 if t < 0:
848 hint = _("check your clock")
848 hint = _("check your clock")
849 raise Abort(_("negative timestamp: %d") % t, hint=hint)
849 raise Abort(_("negative timestamp: %d") % t, hint=hint)
850 return t, tz
850 return t, tz
851
851
852 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
852 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
853 """represent a (unixtime, offset) tuple as a localized time.
853 """represent a (unixtime, offset) tuple as a localized time.
854 unixtime is seconds since the epoch, and offset is the time zone's
854 unixtime is seconds since the epoch, and offset is the time zone's
855 number of seconds away from UTC. "%1" in the format is replaced by
855 number of seconds away from UTC. "%1" in the format is replaced by
856 the offset's sign and hours, and "%2" by its minutes."""
856 the offset's sign and hours, and "%2" by its minutes."""
857 t, tz = date or makedate()
857 t, tz = date or makedate()
858 if t < 0:
858 if t < 0:
859 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
859 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
860 tz = 0
860 tz = 0
861 if "%1" in format or "%2" in format:
861 if "%1" in format or "%2" in format:
862 sign = (tz > 0) and "-" or "+"
862 sign = (tz > 0) and "-" or "+"
863 minutes = abs(tz) // 60
863 minutes = abs(tz) // 60
864 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
864 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
865 format = format.replace("%2", "%02d" % (minutes % 60))
865 format = format.replace("%2", "%02d" % (minutes % 60))
866 s = time.strftime(format, time.gmtime(float(t) - tz))
866 s = time.strftime(format, time.gmtime(float(t) - tz))
867 return s
867 return s
868
868
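# Editor's sketch (not part of this changeset): the "%1"/"%2" escapes expand
# to the offset's sign+hours and minutes; outputs are what the code suggests
# with a C locale, not captured from a real session.
#
#   >>> datestr((0, 0))                        # epoch, UTC
#   'Thu Jan 01 00:00:00 1970 +0000'
#   >>> datestr((0, -3600), '%Y-%m-%d %1%2')   # one hour east of UTC
#   '1970-01-01 +0100'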
869 def shortdate(date=None):
869 def shortdate(date=None):
870 """turn (timestamp, tzoff) tuple into ISO 8601 date."""
870 """turn (timestamp, tzoff) tuple into ISO 8601 date."""
871 return datestr(date, format='%Y-%m-%d')
871 return datestr(date, format='%Y-%m-%d')
872
872
873 def strdate(string, format, defaults=[]):
873 def strdate(string, format, defaults=[]):
874 """parse a localized time string and return a (unixtime, offset) tuple.
874 """parse a localized time string and return a (unixtime, offset) tuple.
875 if the string cannot be parsed, ValueError is raised."""
875 if the string cannot be parsed, ValueError is raised."""
876 def timezone(string):
876 def timezone(string):
877 tz = string.split()[-1]
877 tz = string.split()[-1]
878 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
878 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
879 sign = (tz[0] == "+") and 1 or -1
879 sign = (tz[0] == "+") and 1 or -1
880 hours = int(tz[1:3])
880 hours = int(tz[1:3])
881 minutes = int(tz[3:5])
881 minutes = int(tz[3:5])
882 return -sign * (hours * 60 + minutes) * 60
882 return -sign * (hours * 60 + minutes) * 60
883 if tz == "GMT" or tz == "UTC":
883 if tz == "GMT" or tz == "UTC":
884 return 0
884 return 0
885 return None
885 return None
886
886
887 # NOTE: unixtime = localunixtime + offset
887 # NOTE: unixtime = localunixtime + offset
888 offset, date = timezone(string), string
888 offset, date = timezone(string), string
889 if offset is not None:
889 if offset is not None:
890 date = " ".join(string.split()[:-1])
890 date = " ".join(string.split()[:-1])
891
891
892 # add missing elements from defaults
892 # add missing elements from defaults
893 usenow = False # default to using biased defaults
893 usenow = False # default to using biased defaults
894 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
894 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
895 found = [True for p in part if ("%"+p) in format]
895 found = [True for p in part if ("%"+p) in format]
896 if not found:
896 if not found:
897 date += "@" + defaults[part][usenow]
897 date += "@" + defaults[part][usenow]
898 format += "@%" + part[0]
898 format += "@%" + part[0]
899 else:
899 else:
900 # We've found a specific time element, less specific time
900 # We've found a specific time element, less specific time
901 # elements are relative to today
901 # elements are relative to today
902 usenow = True
902 usenow = True
903
903
904 timetuple = time.strptime(date, format)
904 timetuple = time.strptime(date, format)
905 localunixtime = int(calendar.timegm(timetuple))
905 localunixtime = int(calendar.timegm(timetuple))
906 if offset is None:
906 if offset is None:
907 # local timezone
907 # local timezone
908 unixtime = int(time.mktime(timetuple))
908 unixtime = int(time.mktime(timetuple))
909 offset = unixtime - localunixtime
909 offset = unixtime - localunixtime
910 else:
910 else:
911 unixtime = localunixtime + offset
911 unixtime = localunixtime + offset
912 return unixtime, offset
912 return unixtime, offset
913
913
914 def parsedate(date, formats=None, bias={}):
914 def parsedate(date, formats=None, bias={}):
915 """parse a localized date/time and return a (unixtime, offset) tuple.
915 """parse a localized date/time and return a (unixtime, offset) tuple.
916
916
917 The date may be a "unixtime offset" string or in one of the specified
917 The date may be a "unixtime offset" string or in one of the specified
918 formats. If the date already is a (unixtime, offset) tuple, it is returned.
918 formats. If the date already is a (unixtime, offset) tuple, it is returned.
919 """
919 """
920 if not date:
920 if not date:
921 return 0, 0
921 return 0, 0
922 if isinstance(date, tuple) and len(date) == 2:
922 if isinstance(date, tuple) and len(date) == 2:
923 return date
923 return date
924 if not formats:
924 if not formats:
925 formats = defaultdateformats
925 formats = defaultdateformats
926 date = date.strip()
926 date = date.strip()
927 try:
927 try:
928 when, offset = map(int, date.split(' '))
928 when, offset = map(int, date.split(' '))
929 except ValueError:
929 except ValueError:
930 # fill out defaults
930 # fill out defaults
931 now = makedate()
931 now = makedate()
932 defaults = {}
932 defaults = {}
933 nowmap = {}
933 nowmap = {}
934 for part in ("d", "mb", "yY", "HI", "M", "S"):
934 for part in ("d", "mb", "yY", "HI", "M", "S"):
935 # this piece is for rounding the specific end of unknowns
935 # this piece is for rounding the specific end of unknowns
936 b = bias.get(part)
936 b = bias.get(part)
937 if b is None:
937 if b is None:
938 if part[0] in "HMS":
938 if part[0] in "HMS":
939 b = "00"
939 b = "00"
940 else:
940 else:
941 b = "0"
941 b = "0"
942
942
943 # this piece is for matching the generic end to today's date
943 # this piece is for matching the generic end to today's date
944 n = datestr(now, "%" + part[0])
944 n = datestr(now, "%" + part[0])
945
945
946 defaults[part] = (b, n)
946 defaults[part] = (b, n)
947
947
948 for format in formats:
948 for format in formats:
949 try:
949 try:
950 when, offset = strdate(date, format, defaults)
950 when, offset = strdate(date, format, defaults)
951 except (ValueError, OverflowError):
951 except (ValueError, OverflowError):
952 pass
952 pass
953 else:
953 else:
954 break
954 break
955 else:
955 else:
956 raise Abort(_('invalid date: %r') % date)
956 raise Abort(_('invalid date: %r') % date)
957 # validate explicit (probably user-specified) date and
957 # validate explicit (probably user-specified) date and
958 # time zone offset. values must fit in signed 32 bits for
958 # time zone offset. values must fit in signed 32 bits for
959 # current 32-bit linux runtimes. timezones go from UTC-12
959 # current 32-bit linux runtimes. timezones go from UTC-12
960 # to UTC+14
960 # to UTC+14
961 if abs(when) > 0x7fffffff:
961 if abs(when) > 0x7fffffff:
962 raise Abort(_('date exceeds 32 bits: %d') % when)
962 raise Abort(_('date exceeds 32 bits: %d') % when)
963 if when < 0:
963 if when < 0:
964 raise Abort(_('negative date value: %d') % when)
964 raise Abort(_('negative date value: %d') % when)
965 if offset < -50400 or offset > 43200:
965 if offset < -50400 or offset > 43200:
966 raise Abort(_('impossible time zone offset: %d') % offset)
966 raise Abort(_('impossible time zone offset: %d') % offset)
967 return when, offset
967 return when, offset
968
968
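# Editor's sketch (not part of this changeset): parsedate() accepts a raw
# "unixtime offset" pair or one of the given formats; outputs are what the
# code above suggests.
#
#   >>> parsedate('0 0')
#   (0, 0)
#   >>> parsedate('1970-01-01 00:00:00', ['%Y-%m-%d %H:%M:%S'])
#   (..., ...)   # unixtime/offset depend on the local timezone here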
969 def matchdate(date):
969 def matchdate(date):
970 """Return a function that matches a given date match specifier
970 """Return a function that matches a given date match specifier
971
971
972 Formats include:
972 Formats include:
973
973
974 '{date}' match a given date to the accuracy provided
974 '{date}' match a given date to the accuracy provided
975
975
976 '<{date}' on or before a given date
976 '<{date}' on or before a given date
977
977
978 '>{date}' on or after a given date
978 '>{date}' on or after a given date
979
979
980 >>> p1 = parsedate("10:29:59")
980 >>> p1 = parsedate("10:29:59")
981 >>> p2 = parsedate("10:30:00")
981 >>> p2 = parsedate("10:30:00")
982 >>> p3 = parsedate("10:30:59")
982 >>> p3 = parsedate("10:30:59")
983 >>> p4 = parsedate("10:31:00")
983 >>> p4 = parsedate("10:31:00")
984 >>> p5 = parsedate("Sep 15 10:30:00 1999")
984 >>> p5 = parsedate("Sep 15 10:30:00 1999")
985 >>> f = matchdate("10:30")
985 >>> f = matchdate("10:30")
986 >>> f(p1[0])
986 >>> f(p1[0])
987 False
987 False
988 >>> f(p2[0])
988 >>> f(p2[0])
989 True
989 True
990 >>> f(p3[0])
990 >>> f(p3[0])
991 True
991 True
992 >>> f(p4[0])
992 >>> f(p4[0])
993 False
993 False
994 >>> f(p5[0])
994 >>> f(p5[0])
995 False
995 False
996 """
996 """
997
997
998 def lower(date):
998 def lower(date):
999 d = dict(mb="1", d="1")
999 d = dict(mb="1", d="1")
1000 return parsedate(date, extendeddateformats, d)[0]
1000 return parsedate(date, extendeddateformats, d)[0]
1001
1001
1002 def upper(date):
1002 def upper(date):
1003 d = dict(mb="12", HI="23", M="59", S="59")
1003 d = dict(mb="12", HI="23", M="59", S="59")
1004 for days in ("31", "30", "29"):
1004 for days in ("31", "30", "29"):
1005 try:
1005 try:
1006 d["d"] = days
1006 d["d"] = days
1007 return parsedate(date, extendeddateformats, d)[0]
1007 return parsedate(date, extendeddateformats, d)[0]
1008 except:
1008 except:
1009 pass
1009 pass
1010 d["d"] = "28"
1010 d["d"] = "28"
1011 return parsedate(date, extendeddateformats, d)[0]
1011 return parsedate(date, extendeddateformats, d)[0]
1012
1012
1013 date = date.strip()
1013 date = date.strip()
1014
1014
1015 if not date:
1015 if not date:
1016 raise Abort(_("dates cannot consist entirely of whitespace"))
1016 raise Abort(_("dates cannot consist entirely of whitespace"))
1017 elif date[0] == "<":
1017 elif date[0] == "<":
1018 if not date[1:]:
1018 if not date[1:]:
1019 raise Abort(_("invalid day spec, use '<DATE'"))
1019 raise Abort(_("invalid day spec, use '<DATE'"))
1020 when = upper(date[1:])
1020 when = upper(date[1:])
1021 return lambda x: x <= when
1021 return lambda x: x <= when
1022 elif date[0] == ">":
1022 elif date[0] == ">":
1023 if not date[1:]:
1023 if not date[1:]:
1024 raise Abort(_("invalid day spec, use '>DATE'"))
1024 raise Abort(_("invalid day spec, use '>DATE'"))
1025 when = lower(date[1:])
1025 when = lower(date[1:])
1026 return lambda x: x >= when
1026 return lambda x: x >= when
1027 elif date[0] == "-":
1027 elif date[0] == "-":
1028 try:
1028 try:
1029 days = int(date[1:])
1029 days = int(date[1:])
1030 except ValueError:
1030 except ValueError:
1031 raise Abort(_("invalid day spec: %s") % date[1:])
1031 raise Abort(_("invalid day spec: %s") % date[1:])
1032 if days < 0:
1032 if days < 0:
1033 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1033 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1034 % date[1:])
1034 % date[1:])
1035 when = makedate()[0] - days * 3600 * 24
1035 when = makedate()[0] - days * 3600 * 24
1036 return lambda x: x >= when
1036 return lambda x: x >= when
1037 elif " to " in date:
1037 elif " to " in date:
1038 a, b = date.split(" to ")
1038 a, b = date.split(" to ")
1039 start, stop = lower(a), upper(b)
1039 start, stop = lower(a), upper(b)
1040 return lambda x: x >= start and x <= stop
1040 return lambda x: x >= start and x <= stop
1041 else:
1041 else:
1042 start, stop = lower(date), upper(date)
1042 start, stop = lower(date), upper(date)
1043 return lambda x: x >= start and x <= stop
1043 return lambda x: x >= start and x <= stop
1044
1044
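# Editor's sketch (not part of this changeset): filtering timestamps with a
# date matcher; 'dates' is a hypothetical list of unixtime values.
#
#   dm = matchdate('>2011-03-01')
#   recent = [d for d in dates if dm(d)]   # keeps dates on or after March 1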
1045 def shortuser(user):
1045 def shortuser(user):
1046 """Return a short representation of a user name or email address."""
1046 """Return a short representation of a user name or email address."""
1047 f = user.find('@')
1047 f = user.find('@')
1048 if f >= 0:
1048 if f >= 0:
1049 user = user[:f]
1049 user = user[:f]
1050 f = user.find('<')
1050 f = user.find('<')
1051 if f >= 0:
1051 if f >= 0:
1052 user = user[f + 1:]
1052 user = user[f + 1:]
1053 f = user.find(' ')
1053 f = user.find(' ')
1054 if f >= 0:
1054 if f >= 0:
1055 user = user[:f]
1055 user = user[:f]
1056 f = user.find('.')
1056 f = user.find('.')
1057 if f >= 0:
1057 if f >= 0:
1058 user = user[:f]
1058 user = user[:f]
1059 return user
1059 return user
1060
1060
1061 def email(author):
1061 def email(author):
1062 '''get email of author.'''
1062 '''get email of author.'''
1063 r = author.find('>')
1063 r = author.find('>')
1064 if r == -1:
1064 if r == -1:
1065 r = None
1065 r = None
1066 return author[author.find('<') + 1:r]
1066 return author[author.find('<') + 1:r]
1067
1067
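# Editor's sketch (not part of this changeset): how shortuser() and email()
# reduce a typical "Name <addr>" author string; outputs are what the code
# above suggests.
#
#   >>> shortuser('Foo Bar <foo.bar@example.com>')
#   'foo'
#   >>> email('Foo Bar <foo.bar@example.com>')
#   'foo.bar@example.com'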
1068 def _ellipsis(text, maxlength):
1068 def _ellipsis(text, maxlength):
1069 if len(text) <= maxlength:
1069 if len(text) <= maxlength:
1070 return text, False
1070 return text, False
1071 else:
1071 else:
1072 return "%s..." % (text[:maxlength - 3]), True
1072 return "%s..." % (text[:maxlength - 3]), True
1073
1073
1074 def ellipsis(text, maxlength=400):
1074 def ellipsis(text, maxlength=400):
1075 """Trim string to at most maxlength (default: 400) characters."""
1075 """Trim string to at most maxlength (default: 400) characters."""
1076 try:
1076 try:
1077 # use unicode not to split at intermediate multi-byte sequence
1077 # use unicode not to split at intermediate multi-byte sequence
1078 utext, truncated = _ellipsis(text.decode(encoding.encoding),
1078 utext, truncated = _ellipsis(text.decode(encoding.encoding),
1079 maxlength)
1079 maxlength)
1080 if not truncated:
1080 if not truncated:
1081 return text
1081 return text
1082 return utext.encode(encoding.encoding)
1082 return utext.encode(encoding.encoding)
1083 except (UnicodeDecodeError, UnicodeEncodeError):
1083 except (UnicodeDecodeError, UnicodeEncodeError):
1084 return _ellipsis(text, maxlength)[0]
1084 return _ellipsis(text, maxlength)[0]
1085
1085
1086 def os_rcpath():
1087 '''return default os-specific hgrc search path'''
1088 path = system_rcpath()
1089 path.extend(user_rcpath())
1090 path = [os.path.normpath(f) for f in path]
1091 return path
1092
1093 def bytecount(nbytes):
1086 def bytecount(nbytes):
1094 '''return byte count formatted as readable string, with units'''
1087 '''return byte count formatted as readable string, with units'''
1095
1088
1096 units = (
1089 units = (
1097 (100, 1 << 30, _('%.0f GB')),
1090 (100, 1 << 30, _('%.0f GB')),
1098 (10, 1 << 30, _('%.1f GB')),
1091 (10, 1 << 30, _('%.1f GB')),
1099 (1, 1 << 30, _('%.2f GB')),
1092 (1, 1 << 30, _('%.2f GB')),
1100 (100, 1 << 20, _('%.0f MB')),
1093 (100, 1 << 20, _('%.0f MB')),
1101 (10, 1 << 20, _('%.1f MB')),
1094 (10, 1 << 20, _('%.1f MB')),
1102 (1, 1 << 20, _('%.2f MB')),
1095 (1, 1 << 20, _('%.2f MB')),
1103 (100, 1 << 10, _('%.0f KB')),
1096 (100, 1 << 10, _('%.0f KB')),
1104 (10, 1 << 10, _('%.1f KB')),
1097 (10, 1 << 10, _('%.1f KB')),
1105 (1, 1 << 10, _('%.2f KB')),
1098 (1, 1 << 10, _('%.2f KB')),
1106 (1, 1, _('%.0f bytes')),
1099 (1, 1, _('%.0f bytes')),
1107 )
1100 )
1108
1101
1109 for multiplier, divisor, format in units:
1102 for multiplier, divisor, format in units:
1110 if nbytes >= divisor * multiplier:
1103 if nbytes >= divisor * multiplier:
1111 return format % (nbytes / float(divisor))
1104 return format % (nbytes / float(divisor))
1112 return units[-1][2] % nbytes
1105 return units[-1][2] % nbytes
1113
1106
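# Editor's sketch (not part of this changeset): bytecount() picks the largest
# unit that still keeps roughly three significant digits.
#
#   >>> bytecount(1024)
#   '1.00 KB'
#   >>> bytecount(10 * (1 << 20))
#   '10.0 MB'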
1114 def uirepr(s):
1107 def uirepr(s):
1115 # Avoid double backslash in Windows path repr()
1108 # Avoid double backslash in Windows path repr()
1116 return repr(s).replace('\\\\', '\\')
1109 return repr(s).replace('\\\\', '\\')
1117
1110
1118 # delay import of textwrap
1111 # delay import of textwrap
1119 def MBTextWrapper(**kwargs):
1112 def MBTextWrapper(**kwargs):
1120 class tw(textwrap.TextWrapper):
1113 class tw(textwrap.TextWrapper):
1121 """
1114 """
1122 Extend TextWrapper for double-width characters.
1115 Extend TextWrapper for double-width characters.
1123
1116
1124 Some Asian characters use two terminal columns instead of one.
1117 Some Asian characters use two terminal columns instead of one.
1125 A good example of this behavior can be seen with u'\u65e5\u672c',
1118 A good example of this behavior can be seen with u'\u65e5\u672c',
1126 the two Japanese characters for "Japan":
1119 the two Japanese characters for "Japan":
1127 len() returns 2, but when printed to a terminal, they eat 4 columns.
1120 len() returns 2, but when printed to a terminal, they eat 4 columns.
1128
1121
1129 (Note that this has nothing to do whatsoever with unicode
1122 (Note that this has nothing to do whatsoever with unicode
1130 representation, or encoding of the underlying string)
1123 representation, or encoding of the underlying string)
1131 """
1124 """
1132 def __init__(self, **kwargs):
1125 def __init__(self, **kwargs):
1133 textwrap.TextWrapper.__init__(self, **kwargs)
1126 textwrap.TextWrapper.__init__(self, **kwargs)
1134
1127
1135 def _cutdown(self, str, space_left):
1128 def _cutdown(self, str, space_left):
1136 l = 0
1129 l = 0
1137 ucstr = unicode(str, encoding.encoding)
1130 ucstr = unicode(str, encoding.encoding)
1138 colwidth = unicodedata.east_asian_width
1131 colwidth = unicodedata.east_asian_width
1139 for i in xrange(len(ucstr)):
1132 for i in xrange(len(ucstr)):
1140 l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
1133 l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
1141 if space_left < l:
1134 if space_left < l:
1142 return (ucstr[:i].encode(encoding.encoding),
1135 return (ucstr[:i].encode(encoding.encoding),
1143 ucstr[i:].encode(encoding.encoding))
1136 ucstr[i:].encode(encoding.encoding))
1144 return str, ''
1137 return str, ''
1145
1138
1146 # overriding of base class
1139 # overriding of base class
1147 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1140 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1148 space_left = max(width - cur_len, 1)
1141 space_left = max(width - cur_len, 1)
1149
1142
1150 if self.break_long_words:
1143 if self.break_long_words:
1151 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1144 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1152 cur_line.append(cut)
1145 cur_line.append(cut)
1153 reversed_chunks[-1] = res
1146 reversed_chunks[-1] = res
1154 elif not cur_line:
1147 elif not cur_line:
1155 cur_line.append(reversed_chunks.pop())
1148 cur_line.append(reversed_chunks.pop())
1156
1149
1157 global MBTextWrapper
1150 global MBTextWrapper
1158 MBTextWrapper = tw
1151 MBTextWrapper = tw
1159 return tw(**kwargs)
1152 return tw(**kwargs)
1160
1153
1161 def wrap(line, width, initindent='', hangindent=''):
1154 def wrap(line, width, initindent='', hangindent=''):
1162 maxindent = max(len(hangindent), len(initindent))
1155 maxindent = max(len(hangindent), len(initindent))
1163 if width <= maxindent:
1156 if width <= maxindent:
1164 # adjust for weird terminal size
1157 # adjust for weird terminal size
1165 width = max(78, maxindent + 1)
1158 width = max(78, maxindent + 1)
1166 wrapper = MBTextWrapper(width=width,
1159 wrapper = MBTextWrapper(width=width,
1167 initial_indent=initindent,
1160 initial_indent=initindent,
1168 subsequent_indent=hangindent)
1161 subsequent_indent=hangindent)
1169 return wrapper.fill(line)
1162 return wrapper.fill(line)
1170
1163
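# Editor's sketch (not part of this changeset): wrap() with a hanging indent,
# as used for help output; the result assumes standard textwrap behavior for
# plain ASCII input.
#
#   >>> wrap('a b c d', width=5, initindent='', hangindent='  ')
#   'a b c\n  d'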
1171 def iterlines(iterator):
1164 def iterlines(iterator):
1172 for chunk in iterator:
1165 for chunk in iterator:
1173 for line in chunk.splitlines():
1166 for line in chunk.splitlines():
1174 yield line
1167 yield line
1175
1168
1176 def expandpath(path):
1169 def expandpath(path):
1177 return os.path.expanduser(os.path.expandvars(path))
1170 return os.path.expanduser(os.path.expandvars(path))
1178
1171
1179 def hgcmd():
1172 def hgcmd():
1180 """Return the command used to execute current hg
1173 """Return the command used to execute current hg
1181
1174
1182 This is different from hgexecutable() because on Windows we want
1175 This is different from hgexecutable() because on Windows we want
1183 to avoid things opening new shell windows like batch files, so we
1176 to avoid things opening new shell windows like batch files, so we
1184 get either the python call or current executable.
1177 get either the python call or current executable.
1185 """
1178 """
1186 if main_is_frozen():
1179 if main_is_frozen():
1187 return [sys.executable]
1180 return [sys.executable]
1188 return gethgcmd()
1181 return gethgcmd()
1189
1182
1190 def rundetached(args, condfn):
1183 def rundetached(args, condfn):
1191 """Execute the argument list in a detached process.
1184 """Execute the argument list in a detached process.
1192
1185
1193 condfn is a callable which is called repeatedly and should return
1186 condfn is a callable which is called repeatedly and should return
1194 True once the child process is known to have started successfully.
1187 True once the child process is known to have started successfully.
1195 At this point, the child process PID is returned. If the child
1188 At this point, the child process PID is returned. If the child
1196 process fails to start or finishes before condfn() evaluates to
1189 process fails to start or finishes before condfn() evaluates to
1197 True, return -1.
1190 True, return -1.
1198 """
1191 """
1199 # Windows case is easier because the child process is either
1192 # Windows case is easier because the child process is either
1200 # successfully starting and validating the condition or exiting
1193 # successfully starting and validating the condition or exiting
1201 # on failure. We just poll on its PID. On Unix, if the child
1194 # on failure. We just poll on its PID. On Unix, if the child
1202 # process fails to start, it will be left in a zombie state until
1195 # process fails to start, it will be left in a zombie state until
1203 # the parent wait on it, which we cannot do since we expect a long
1196 # the parent wait on it, which we cannot do since we expect a long
1204 # running process on success. Instead we listen for SIGCHLD telling
1197 # running process on success. Instead we listen for SIGCHLD telling
1205 # us our child process terminated.
1198 # us our child process terminated.
1206 terminated = set()
1199 terminated = set()
1207 def handler(signum, frame):
1200 def handler(signum, frame):
1208 terminated.add(os.wait())
1201 terminated.add(os.wait())
1209 prevhandler = None
1202 prevhandler = None
1210 if hasattr(signal, 'SIGCHLD'):
1203 if hasattr(signal, 'SIGCHLD'):
1211 prevhandler = signal.signal(signal.SIGCHLD, handler)
1204 prevhandler = signal.signal(signal.SIGCHLD, handler)
1212 try:
1205 try:
1213 pid = spawndetached(args)
1206 pid = spawndetached(args)
1214 while not condfn():
1207 while not condfn():
1215 if ((pid in terminated or not testpid(pid))
1208 if ((pid in terminated or not testpid(pid))
1216 and not condfn()):
1209 and not condfn()):
1217 return -1
1210 return -1
1218 time.sleep(0.1)
1211 time.sleep(0.1)
1219 return pid
1212 return pid
1220 finally:
1213 finally:
1221 if prevhandler is not None:
1214 if prevhandler is not None:
1222 signal.signal(signal.SIGCHLD, prevhandler)
1215 signal.signal(signal.SIGCHLD, prevhandler)
1223
1216
1224 try:
1217 try:
1225 any, all = any, all
1218 any, all = any, all
1226 except NameError:
1219 except NameError:
1227 def any(iterable):
1220 def any(iterable):
1228 for i in iterable:
1221 for i in iterable:
1229 if i:
1222 if i:
1230 return True
1223 return True
1231 return False
1224 return False
1232
1225
1233 def all(iterable):
1226 def all(iterable):
1234 for i in iterable:
1227 for i in iterable:
1235 if not i:
1228 if not i:
1236 return False
1229 return False
1237 return True
1230 return True
1238
1231
1239 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1232 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1240 """Return the result of interpolating items in the mapping into string s.
1233 """Return the result of interpolating items in the mapping into string s.
1241
1234
1242 prefix is a single character string, or a two character string with
1235 prefix is a single character string, or a two character string with
1243 a backslash as the first character if the prefix needs to be escaped in
1236 a backslash as the first character if the prefix needs to be escaped in
1244 a regular expression.
1237 a regular expression.
1245
1238
1246 fn is an optional function that will be applied to the replacement text
1239 fn is an optional function that will be applied to the replacement text
1247 just before replacement.
1240 just before replacement.
1248
1241
1249 escape_prefix is an optional flag that allows using doubled prefix for
1242 escape_prefix is an optional flag that allows using doubled prefix for
1250 its escaping.
1243 its escaping.
1251 """
1244 """
1252 fn = fn or (lambda s: s)
1245 fn = fn or (lambda s: s)
1253 patterns = '|'.join(mapping.keys())
1246 patterns = '|'.join(mapping.keys())
1254 if escape_prefix:
1247 if escape_prefix:
1255 patterns += '|' + prefix
1248 patterns += '|' + prefix
1256 if len(prefix) > 1:
1249 if len(prefix) > 1:
1257 prefix_char = prefix[1:]
1250 prefix_char = prefix[1:]
1258 else:
1251 else:
1259 prefix_char = prefix
1252 prefix_char = prefix
1260 mapping[prefix_char] = prefix_char
1253 mapping[prefix_char] = prefix_char
1261 r = re.compile(r'%s(%s)' % (prefix, patterns))
1254 r = re.compile(r'%s(%s)' % (prefix, patterns))
1262 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1255 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1263
1256
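# Editor's sketch (not part of this changeset): simple '%'-prefixed template
# expansion; the mapping and template are made up for illustration.
#
#   >>> interpolate('%', {'user': 'alice'}, 'hello %user!')
#   'hello alice!'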
1264 def getport(port):
1257 def getport(port):
1265 """Return the port for a given network service.
1258 """Return the port for a given network service.
1266
1259
1267 If port is an integer, it's returned as is. If it's a string, it's
1260 If port is an integer, it's returned as is. If it's a string, it's
1268 looked up using socket.getservbyname(). If there's no matching
1261 looked up using socket.getservbyname(). If there's no matching
1269 service, util.Abort is raised.
1262 service, util.Abort is raised.
1270 """
1263 """
1271 try:
1264 try:
1272 return int(port)
1265 return int(port)
1273 except ValueError:
1266 except ValueError:
1274 pass
1267 pass
1275
1268
1276 try:
1269 try:
1277 return socket.getservbyname(port)
1270 return socket.getservbyname(port)
1278 except socket.error:
1271 except socket.error:
1279 raise Abort(_("no port number associated with service '%s'") % port)
1272 raise Abort(_("no port number associated with service '%s'") % port)
1280
1273
1281 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1274 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1282 '0': False, 'no': False, 'false': False, 'off': False,
1275 '0': False, 'no': False, 'false': False, 'off': False,
1283 'never': False}
1276 'never': False}
1284
1277
1285 def parsebool(s):
1278 def parsebool(s):
1286 """Parse s into a boolean.
1279 """Parse s into a boolean.
1287
1280
1288 If s is not a valid boolean, returns None.
1281 If s is not a valid boolean, returns None.
1289 """
1282 """
1290 return _booleans.get(s.lower(), None)
1283 return _booleans.get(s.lower(), None)
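# Editor's sketch (not part of this changeset): parsebool() distinguishes an
# explicit false from an unrecognized value, which callers can treat as
# "not a boolean".
#
#   >>> parsebool('off')
#   False
#   >>> parsebool('maybe') is None
#   True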