scmutil: create directories in a race-safe way during update...
Bryan O'Sullivan
r18668:4034b8d5 default
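The hunks below replace util.makedirs with util.ensuredirs at the three points where the vfs class creates missing parent directories on demand (the atomictemp branch of __call__, the ordinary write branch, and symlink). The motivation is the race named in the summary: an isdir check followed by makedirs can fail with EEXIST when a concurrent process creates the same directory during a working-copy update. The helper itself is introduced in util.py (second hunk, truncated below). As a rough illustrative sketch of the race-safe pattern, with the name ensuredirs assumed from the call sites rather than copied from the commit:

import errno
import os

def ensuredirs(name, mode=None):
    # Create `name` and any missing ancestors.  Unlike a bare
    # check-then-makedirs sequence, losing the creation race to another
    # process is treated as success, not an error.
    if os.path.isdir(name):
        return
    parent = os.path.dirname(os.path.abspath(name))
    if parent and parent != name:
        ensuredirs(parent, mode)
    try:
        os.mkdir(name)
    except OSError as err:
        if err.errno == errno.EEXIST and os.path.isdir(name):
            # another process created it first; that is exactly the
            # outcome we wanted
            return
        raise
    if mode is not None:
        os.chmod(name, mode)

The essential difference from the old call sites is that EEXIST raised by a concurrent creator is tolerated instead of propagating as an abort mid-update.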
@@ -1,1003 +1,1003 b''
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 from mercurial.node import nullrev
9 from mercurial.node import nullrev
10 import util, error, osutil, revset, similar, encoding, phases
10 import util, error, osutil, revset, similar, encoding, phases
11 import match as matchmod
11 import match as matchmod
12 import os, errno, re, stat, sys, glob
12 import os, errno, re, stat, sys, glob
13
13
14 def nochangesfound(ui, repo, excluded=None):
14 def nochangesfound(ui, repo, excluded=None):
15 '''Report no changes for push/pull, excluded is None or a list of
15 '''Report no changes for push/pull, excluded is None or a list of
16 nodes excluded from the push/pull.
16 nodes excluded from the push/pull.
17 '''
17 '''
18 secretlist = []
18 secretlist = []
19 if excluded:
19 if excluded:
20 for n in excluded:
20 for n in excluded:
21 if n not in repo:
21 if n not in repo:
22 # discovery should not have included the filtered revision,
22 # discovery should not have included the filtered revision,
23 # we have to explicitly exclude it until discovery is cleaned up.
23 # we have to explicitly exclude it until discovery is cleaned up.
24 continue
24 continue
25 ctx = repo[n]
25 ctx = repo[n]
26 if ctx.phase() >= phases.secret and not ctx.extinct():
26 if ctx.phase() >= phases.secret and not ctx.extinct():
27 secretlist.append(n)
27 secretlist.append(n)
28
28
29 if secretlist:
29 if secretlist:
30 ui.status(_("no changes found (ignored %d secret changesets)\n")
30 ui.status(_("no changes found (ignored %d secret changesets)\n")
31 % len(secretlist))
31 % len(secretlist))
32 else:
32 else:
33 ui.status(_("no changes found\n"))
33 ui.status(_("no changes found\n"))
34
34
35 def checknewlabel(repo, lbl, kind):
35 def checknewlabel(repo, lbl, kind):
36 if lbl in ['tip', '.', 'null']:
36 if lbl in ['tip', '.', 'null']:
37 raise util.Abort(_("the name '%s' is reserved") % lbl)
37 raise util.Abort(_("the name '%s' is reserved") % lbl)
38 for c in (':', '\0', '\n', '\r'):
38 for c in (':', '\0', '\n', '\r'):
39 if c in lbl:
39 if c in lbl:
40 raise util.Abort(_("%r cannot be used in a name") % c)
40 raise util.Abort(_("%r cannot be used in a name") % c)
41 try:
41 try:
42 int(lbl)
42 int(lbl)
43 raise util.Abort(_("a %s cannot have an integer as its name") % kind)
43 raise util.Abort(_("a %s cannot have an integer as its name") % kind)
44 except ValueError:
44 except ValueError:
45 pass
45 pass
46
46
47 def checkfilename(f):
47 def checkfilename(f):
48 '''Check that the filename f is an acceptable filename for a tracked file'''
48 '''Check that the filename f is an acceptable filename for a tracked file'''
49 if '\r' in f or '\n' in f:
49 if '\r' in f or '\n' in f:
50 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
50 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
51
51
52 def checkportable(ui, f):
52 def checkportable(ui, f):
53 '''Check if filename f is portable and warn or abort depending on config'''
53 '''Check if filename f is portable and warn or abort depending on config'''
54 checkfilename(f)
54 checkfilename(f)
55 abort, warn = checkportabilityalert(ui)
55 abort, warn = checkportabilityalert(ui)
56 if abort or warn:
56 if abort or warn:
57 msg = util.checkwinfilename(f)
57 msg = util.checkwinfilename(f)
58 if msg:
58 if msg:
59 msg = "%s: %r" % (msg, f)
59 msg = "%s: %r" % (msg, f)
60 if abort:
60 if abort:
61 raise util.Abort(msg)
61 raise util.Abort(msg)
62 ui.warn(_("warning: %s\n") % msg)
62 ui.warn(_("warning: %s\n") % msg)
63
63
64 def checkportabilityalert(ui):
64 def checkportabilityalert(ui):
65 '''check if the user's config requests nothing, a warning, or abort for
65 '''check if the user's config requests nothing, a warning, or abort for
66 non-portable filenames'''
66 non-portable filenames'''
67 val = ui.config('ui', 'portablefilenames', 'warn')
67 val = ui.config('ui', 'portablefilenames', 'warn')
68 lval = val.lower()
68 lval = val.lower()
69 bval = util.parsebool(val)
69 bval = util.parsebool(val)
70 abort = os.name == 'nt' or lval == 'abort'
70 abort = os.name == 'nt' or lval == 'abort'
71 warn = bval or lval == 'warn'
71 warn = bval or lval == 'warn'
72 if bval is None and not (warn or abort or lval == 'ignore'):
72 if bval is None and not (warn or abort or lval == 'ignore'):
73 raise error.ConfigError(
73 raise error.ConfigError(
74 _("ui.portablefilenames value is invalid ('%s')") % val)
74 _("ui.portablefilenames value is invalid ('%s')") % val)
75 return abort, warn
75 return abort, warn
76
76
77 class casecollisionauditor(object):
77 class casecollisionauditor(object):
78 def __init__(self, ui, abort, dirstate):
78 def __init__(self, ui, abort, dirstate):
79 self._ui = ui
79 self._ui = ui
80 self._abort = abort
80 self._abort = abort
81 allfiles = '\0'.join(dirstate._map)
81 allfiles = '\0'.join(dirstate._map)
82 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
82 self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
83 self._dirstate = dirstate
83 self._dirstate = dirstate
84 # The purpose of _newfiles is so that we don't complain about
84 # The purpose of _newfiles is so that we don't complain about
85 # case collisions if someone were to call this object with the
85 # case collisions if someone were to call this object with the
86 # same filename twice.
86 # same filename twice.
87 self._newfiles = set()
87 self._newfiles = set()
88
88
89 def __call__(self, f):
89 def __call__(self, f):
90 fl = encoding.lower(f)
90 fl = encoding.lower(f)
91 if (fl in self._loweredfiles and f not in self._dirstate and
91 if (fl in self._loweredfiles and f not in self._dirstate and
92 f not in self._newfiles):
92 f not in self._newfiles):
93 msg = _('possible case-folding collision for %s') % f
93 msg = _('possible case-folding collision for %s') % f
94 if self._abort:
94 if self._abort:
95 raise util.Abort(msg)
95 raise util.Abort(msg)
96 self._ui.warn(_("warning: %s\n") % msg)
96 self._ui.warn(_("warning: %s\n") % msg)
97 self._loweredfiles.add(fl)
97 self._loweredfiles.add(fl)
98 self._newfiles.add(f)
98 self._newfiles.add(f)
99
99
100 class pathauditor(object):
100 class pathauditor(object):
101 '''ensure that a filesystem path contains no banned components.
101 '''ensure that a filesystem path contains no banned components.
102 the following properties of a path are checked:
102 the following properties of a path are checked:
103
103
104 - ends with a directory separator
104 - ends with a directory separator
105 - under top-level .hg
105 - under top-level .hg
106 - starts at the root of a windows drive
106 - starts at the root of a windows drive
107 - contains ".."
107 - contains ".."
108 - traverses a symlink (e.g. a/symlink_here/b)
108 - traverses a symlink (e.g. a/symlink_here/b)
109 - inside a nested repository (a callback can be used to approve
109 - inside a nested repository (a callback can be used to approve
110 some nested repositories, e.g., subrepositories)
110 some nested repositories, e.g., subrepositories)
111 '''
111 '''
112
112
113 def __init__(self, root, callback=None):
113 def __init__(self, root, callback=None):
114 self.audited = set()
114 self.audited = set()
115 self.auditeddir = set()
115 self.auditeddir = set()
116 self.root = root
116 self.root = root
117 self.callback = callback
117 self.callback = callback
118 if os.path.lexists(root) and not util.checkcase(root):
118 if os.path.lexists(root) and not util.checkcase(root):
119 self.normcase = util.normcase
119 self.normcase = util.normcase
120 else:
120 else:
121 self.normcase = lambda x: x
121 self.normcase = lambda x: x
122
122
123 def __call__(self, path):
123 def __call__(self, path):
124 '''Check the relative path.
124 '''Check the relative path.
125 path may contain a pattern (e.g. foodir/**.txt)'''
125 path may contain a pattern (e.g. foodir/**.txt)'''
126
126
127 path = util.localpath(path)
127 path = util.localpath(path)
128 normpath = self.normcase(path)
128 normpath = self.normcase(path)
129 if normpath in self.audited:
129 if normpath in self.audited:
130 return
130 return
131 # AIX ignores "/" at end of path, others raise EISDIR.
131 # AIX ignores "/" at end of path, others raise EISDIR.
132 if util.endswithsep(path):
132 if util.endswithsep(path):
133 raise util.Abort(_("path ends in directory separator: %s") % path)
133 raise util.Abort(_("path ends in directory separator: %s") % path)
134 parts = util.splitpath(path)
134 parts = util.splitpath(path)
135 if (os.path.splitdrive(path)[0]
135 if (os.path.splitdrive(path)[0]
136 or parts[0].lower() in ('.hg', '.hg.', '')
136 or parts[0].lower() in ('.hg', '.hg.', '')
137 or os.pardir in parts):
137 or os.pardir in parts):
138 raise util.Abort(_("path contains illegal component: %s") % path)
138 raise util.Abort(_("path contains illegal component: %s") % path)
139 if '.hg' in path.lower():
139 if '.hg' in path.lower():
140 lparts = [p.lower() for p in parts]
140 lparts = [p.lower() for p in parts]
141 for p in '.hg', '.hg.':
141 for p in '.hg', '.hg.':
142 if p in lparts[1:]:
142 if p in lparts[1:]:
143 pos = lparts.index(p)
143 pos = lparts.index(p)
144 base = os.path.join(*parts[:pos])
144 base = os.path.join(*parts[:pos])
145 raise util.Abort(_("path '%s' is inside nested repo %r")
145 raise util.Abort(_("path '%s' is inside nested repo %r")
146 % (path, base))
146 % (path, base))
147
147
148 normparts = util.splitpath(normpath)
148 normparts = util.splitpath(normpath)
149 assert len(parts) == len(normparts)
149 assert len(parts) == len(normparts)
150
150
151 parts.pop()
151 parts.pop()
152 normparts.pop()
152 normparts.pop()
153 prefixes = []
153 prefixes = []
154 while parts:
154 while parts:
155 prefix = os.sep.join(parts)
155 prefix = os.sep.join(parts)
156 normprefix = os.sep.join(normparts)
156 normprefix = os.sep.join(normparts)
157 if normprefix in self.auditeddir:
157 if normprefix in self.auditeddir:
158 break
158 break
159 curpath = os.path.join(self.root, prefix)
159 curpath = os.path.join(self.root, prefix)
160 try:
160 try:
161 st = os.lstat(curpath)
161 st = os.lstat(curpath)
162 except OSError, err:
162 except OSError, err:
163 # EINVAL can be raised as invalid path syntax under win32.
163 # EINVAL can be raised as invalid path syntax under win32.
164 # They must be ignored for patterns can be checked too.
164 # They must be ignored for patterns can be checked too.
165 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
165 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
166 raise
166 raise
167 else:
167 else:
168 if stat.S_ISLNK(st.st_mode):
168 if stat.S_ISLNK(st.st_mode):
169 raise util.Abort(
169 raise util.Abort(
170 _('path %r traverses symbolic link %r')
170 _('path %r traverses symbolic link %r')
171 % (path, prefix))
171 % (path, prefix))
172 elif (stat.S_ISDIR(st.st_mode) and
172 elif (stat.S_ISDIR(st.st_mode) and
173 os.path.isdir(os.path.join(curpath, '.hg'))):
173 os.path.isdir(os.path.join(curpath, '.hg'))):
174 if not self.callback or not self.callback(curpath):
174 if not self.callback or not self.callback(curpath):
175 raise util.Abort(_("path '%s' is inside nested "
175 raise util.Abort(_("path '%s' is inside nested "
176 "repo %r")
176 "repo %r")
177 % (path, prefix))
177 % (path, prefix))
178 prefixes.append(normprefix)
178 prefixes.append(normprefix)
179 parts.pop()
179 parts.pop()
180 normparts.pop()
180 normparts.pop()
181
181
182 self.audited.add(normpath)
182 self.audited.add(normpath)
183 # only add prefixes to the cache after checking everything: we don't
183 # only add prefixes to the cache after checking everything: we don't
184 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
184 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
185 self.auditeddir.update(prefixes)
185 self.auditeddir.update(prefixes)
186
186
187 def check(self, path):
187 def check(self, path):
188 try:
188 try:
189 self(path)
189 self(path)
190 return True
190 return True
191 except (OSError, util.Abort):
191 except (OSError, util.Abort):
192 return False
192 return False
193
193
194 class abstractvfs(object):
194 class abstractvfs(object):
195 """Abstract base class; cannot be instantiated"""
195 """Abstract base class; cannot be instantiated"""
196
196
197 def __init__(self, *args, **kwargs):
197 def __init__(self, *args, **kwargs):
198 '''Prevent instantiation; don't call this from subclasses.'''
198 '''Prevent instantiation; don't call this from subclasses.'''
199 raise NotImplementedError('attempted instantiating ' + str(type(self)))
199 raise NotImplementedError('attempted instantiating ' + str(type(self)))
200
200
201 def tryread(self, path):
201 def tryread(self, path):
202 '''gracefully return an empty string for missing files'''
202 '''gracefully return an empty string for missing files'''
203 try:
203 try:
204 return self.read(path)
204 return self.read(path)
205 except IOError, inst:
205 except IOError, inst:
206 if inst.errno != errno.ENOENT:
206 if inst.errno != errno.ENOENT:
207 raise
207 raise
208 return ""
208 return ""
209
209
210 def read(self, path):
210 def read(self, path):
211 fp = self(path, 'rb')
211 fp = self(path, 'rb')
212 try:
212 try:
213 return fp.read()
213 return fp.read()
214 finally:
214 finally:
215 fp.close()
215 fp.close()
216
216
217 def write(self, path, data):
217 def write(self, path, data):
218 fp = self(path, 'wb')
218 fp = self(path, 'wb')
219 try:
219 try:
220 return fp.write(data)
220 return fp.write(data)
221 finally:
221 finally:
222 fp.close()
222 fp.close()
223
223
224 def append(self, path, data):
224 def append(self, path, data):
225 fp = self(path, 'ab')
225 fp = self(path, 'ab')
226 try:
226 try:
227 return fp.write(data)
227 return fp.write(data)
228 finally:
228 finally:
229 fp.close()
229 fp.close()
230
230
231 def exists(self, path=None):
231 def exists(self, path=None):
232 return os.path.exists(self.join(path))
232 return os.path.exists(self.join(path))
233
233
234 def isdir(self, path=None):
234 def isdir(self, path=None):
235 return os.path.isdir(self.join(path))
235 return os.path.isdir(self.join(path))
236
236
237 def makedir(self, path=None, notindexed=True):
237 def makedir(self, path=None, notindexed=True):
238 return util.makedir(self.join(path), notindexed)
238 return util.makedir(self.join(path), notindexed)
239
239
240 def makedirs(self, path=None, mode=None):
240 def makedirs(self, path=None, mode=None):
241 return util.makedirs(self.join(path), mode)
241 return util.makedirs(self.join(path), mode)
242
242
243 def mkdir(self, path=None):
243 def mkdir(self, path=None):
244 return os.mkdir(self.join(path))
244 return os.mkdir(self.join(path))
245
245
246 def readdir(self, path=None, stat=None, skip=None):
246 def readdir(self, path=None, stat=None, skip=None):
247 return osutil.listdir(self.join(path), stat, skip)
247 return osutil.listdir(self.join(path), stat, skip)
248
248
249 def stat(self, path=None):
249 def stat(self, path=None):
250 return os.stat(self.join(path))
250 return os.stat(self.join(path))
251
251
252 class vfs(abstractvfs):
252 class vfs(abstractvfs):
253 '''Operate files relative to a base directory
253 '''Operate files relative to a base directory
254
254
255 This class is used to hide the details of COW semantics and
255 This class is used to hide the details of COW semantics and
256 remote file access from higher level code.
256 remote file access from higher level code.
257 '''
257 '''
258 def __init__(self, base, audit=True, expand=False):
258 def __init__(self, base, audit=True, expand=False):
259 if expand:
259 if expand:
260 base = os.path.realpath(util.expandpath(base))
260 base = os.path.realpath(util.expandpath(base))
261 self.base = base
261 self.base = base
262 self._setmustaudit(audit)
262 self._setmustaudit(audit)
263 self.createmode = None
263 self.createmode = None
264 self._trustnlink = None
264 self._trustnlink = None
265
265
266 def _getmustaudit(self):
266 def _getmustaudit(self):
267 return self._audit
267 return self._audit
268
268
269 def _setmustaudit(self, onoff):
269 def _setmustaudit(self, onoff):
270 self._audit = onoff
270 self._audit = onoff
271 if onoff:
271 if onoff:
272 self.audit = pathauditor(self.base)
272 self.audit = pathauditor(self.base)
273 else:
273 else:
274 self.audit = util.always
274 self.audit = util.always
275
275
276 mustaudit = property(_getmustaudit, _setmustaudit)
276 mustaudit = property(_getmustaudit, _setmustaudit)
277
277
278 @util.propertycache
278 @util.propertycache
279 def _cansymlink(self):
279 def _cansymlink(self):
280 return util.checklink(self.base)
280 return util.checklink(self.base)
281
281
282 @util.propertycache
282 @util.propertycache
283 def _chmod(self):
283 def _chmod(self):
284 return util.checkexec(self.base)
284 return util.checkexec(self.base)
285
285
286 def _fixfilemode(self, name):
286 def _fixfilemode(self, name):
287 if self.createmode is None or not self._chmod:
287 if self.createmode is None or not self._chmod:
288 return
288 return
289 os.chmod(name, self.createmode & 0666)
289 os.chmod(name, self.createmode & 0666)
290
290
291 def __call__(self, path, mode="r", text=False, atomictemp=False):
291 def __call__(self, path, mode="r", text=False, atomictemp=False):
292 if self._audit:
292 if self._audit:
293 r = util.checkosfilename(path)
293 r = util.checkosfilename(path)
294 if r:
294 if r:
295 raise util.Abort("%s: %r" % (r, path))
295 raise util.Abort("%s: %r" % (r, path))
296 self.audit(path)
296 self.audit(path)
297 f = self.join(path)
297 f = self.join(path)
298
298
299 if not text and "b" not in mode:
299 if not text and "b" not in mode:
300 mode += "b" # for that other OS
300 mode += "b" # for that other OS
301
301
302 nlink = -1
302 nlink = -1
303 if mode not in ('r', 'rb'):
303 if mode not in ('r', 'rb'):
304 dirname, basename = util.split(f)
304 dirname, basename = util.split(f)
305 # If basename is empty, then the path is malformed because it points
305 # If basename is empty, then the path is malformed because it points
306 # to a directory. Let the posixfile() call below raise IOError.
306 # to a directory. Let the posixfile() call below raise IOError.
307 if basename:
307 if basename:
308 if atomictemp:
308 if atomictemp:
309 if not os.path.isdir(dirname):
309 if not os.path.isdir(dirname):
310 util.makedirs(dirname, self.createmode)
310 util.ensuredirs(dirname, self.createmode)
311 return util.atomictempfile(f, mode, self.createmode)
311 return util.atomictempfile(f, mode, self.createmode)
312 try:
312 try:
313 if 'w' in mode:
313 if 'w' in mode:
314 util.unlink(f)
314 util.unlink(f)
315 nlink = 0
315 nlink = 0
316 else:
316 else:
317 # nlinks() may behave differently for files on Windows
317 # nlinks() may behave differently for files on Windows
318 # shares if the file is open.
318 # shares if the file is open.
319 fd = util.posixfile(f)
319 fd = util.posixfile(f)
320 nlink = util.nlinks(f)
320 nlink = util.nlinks(f)
321 if nlink < 1:
321 if nlink < 1:
322 nlink = 2 # force mktempcopy (issue1922)
322 nlink = 2 # force mktempcopy (issue1922)
323 fd.close()
323 fd.close()
324 except (OSError, IOError), e:
324 except (OSError, IOError), e:
325 if e.errno != errno.ENOENT:
325 if e.errno != errno.ENOENT:
326 raise
326 raise
327 nlink = 0
327 nlink = 0
328 if not os.path.isdir(dirname):
328 if not os.path.isdir(dirname):
329 util.makedirs(dirname, self.createmode)
329 util.ensuredirs(dirname, self.createmode)
330 if nlink > 0:
330 if nlink > 0:
331 if self._trustnlink is None:
331 if self._trustnlink is None:
332 self._trustnlink = nlink > 1 or util.checknlink(f)
332 self._trustnlink = nlink > 1 or util.checknlink(f)
333 if nlink > 1 or not self._trustnlink:
333 if nlink > 1 or not self._trustnlink:
334 util.rename(util.mktempcopy(f), f)
334 util.rename(util.mktempcopy(f), f)
335 fp = util.posixfile(f, mode)
335 fp = util.posixfile(f, mode)
336 if nlink == 0:
336 if nlink == 0:
337 self._fixfilemode(f)
337 self._fixfilemode(f)
338 return fp
338 return fp
339
339
340 def symlink(self, src, dst):
340 def symlink(self, src, dst):
341 self.audit(dst)
341 self.audit(dst)
342 linkname = self.join(dst)
342 linkname = self.join(dst)
343 try:
343 try:
344 os.unlink(linkname)
344 os.unlink(linkname)
345 except OSError:
345 except OSError:
346 pass
346 pass
347
347
348 dirname = os.path.dirname(linkname)
348 dirname = os.path.dirname(linkname)
349 if not os.path.exists(dirname):
349 if not os.path.exists(dirname):
350 util.makedirs(dirname, self.createmode)
350 util.ensuredirs(dirname, self.createmode)
351
351
352 if self._cansymlink:
352 if self._cansymlink:
353 try:
353 try:
354 os.symlink(src, linkname)
354 os.symlink(src, linkname)
355 except OSError, err:
355 except OSError, err:
356 raise OSError(err.errno, _('could not symlink to %r: %s') %
356 raise OSError(err.errno, _('could not symlink to %r: %s') %
357 (src, err.strerror), linkname)
357 (src, err.strerror), linkname)
358 else:
358 else:
359 self.write(dst, src)
359 self.write(dst, src)
360
360
361 def join(self, path):
361 def join(self, path):
362 if path:
362 if path:
363 return os.path.join(self.base, path)
363 return os.path.join(self.base, path)
364 else:
364 else:
365 return self.base
365 return self.base
366
366
367 opener = vfs
367 opener = vfs
368
368
369 class auditvfs(object):
369 class auditvfs(object):
370 def __init__(self, vfs):
370 def __init__(self, vfs):
371 self.vfs = vfs
371 self.vfs = vfs
372
372
373 def _getmustaudit(self):
373 def _getmustaudit(self):
374 return self.vfs.mustaudit
374 return self.vfs.mustaudit
375
375
376 def _setmustaudit(self, onoff):
376 def _setmustaudit(self, onoff):
377 self.vfs.mustaudit = onoff
377 self.vfs.mustaudit = onoff
378
378
379 mustaudit = property(_getmustaudit, _setmustaudit)
379 mustaudit = property(_getmustaudit, _setmustaudit)
380
380
381 class filtervfs(abstractvfs, auditvfs):
381 class filtervfs(abstractvfs, auditvfs):
382 '''Wrapper vfs for filtering filenames with a function.'''
382 '''Wrapper vfs for filtering filenames with a function.'''
383
383
384 def __init__(self, vfs, filter):
384 def __init__(self, vfs, filter):
385 auditvfs.__init__(self, vfs)
385 auditvfs.__init__(self, vfs)
386 self._filter = filter
386 self._filter = filter
387
387
388 def __call__(self, path, *args, **kwargs):
388 def __call__(self, path, *args, **kwargs):
389 return self.vfs(self._filter(path), *args, **kwargs)
389 return self.vfs(self._filter(path), *args, **kwargs)
390
390
391 def join(self, path):
391 def join(self, path):
392 if path:
392 if path:
393 return self.vfs.join(self._filter(path))
393 return self.vfs.join(self._filter(path))
394 else:
394 else:
395 return self.vfs.join(path)
395 return self.vfs.join(path)
396
396
397 filteropener = filtervfs
397 filteropener = filtervfs
398
398
399 class readonlyvfs(abstractvfs, auditvfs):
399 class readonlyvfs(abstractvfs, auditvfs):
400 '''Wrapper vfs preventing any writing.'''
400 '''Wrapper vfs preventing any writing.'''
401
401
402 def __init__(self, vfs):
402 def __init__(self, vfs):
403 auditvfs.__init__(self, vfs)
403 auditvfs.__init__(self, vfs)
404
404
405 def __call__(self, path, mode='r', *args, **kw):
405 def __call__(self, path, mode='r', *args, **kw):
406 if mode not in ('r', 'rb'):
406 if mode not in ('r', 'rb'):
407 raise util.Abort('this vfs is read only')
407 raise util.Abort('this vfs is read only')
408 return self.vfs(path, mode, *args, **kw)
408 return self.vfs(path, mode, *args, **kw)
409
409
410
410
411 def canonpath(root, cwd, myname, auditor=None):
411 def canonpath(root, cwd, myname, auditor=None):
412 '''return the canonical path of myname, given cwd and root'''
412 '''return the canonical path of myname, given cwd and root'''
413 if util.endswithsep(root):
413 if util.endswithsep(root):
414 rootsep = root
414 rootsep = root
415 else:
415 else:
416 rootsep = root + os.sep
416 rootsep = root + os.sep
417 name = myname
417 name = myname
418 if not os.path.isabs(name):
418 if not os.path.isabs(name):
419 name = os.path.join(root, cwd, name)
419 name = os.path.join(root, cwd, name)
420 name = os.path.normpath(name)
420 name = os.path.normpath(name)
421 if auditor is None:
421 if auditor is None:
422 auditor = pathauditor(root)
422 auditor = pathauditor(root)
423 if name != rootsep and name.startswith(rootsep):
423 if name != rootsep and name.startswith(rootsep):
424 name = name[len(rootsep):]
424 name = name[len(rootsep):]
425 auditor(name)
425 auditor(name)
426 return util.pconvert(name)
426 return util.pconvert(name)
427 elif name == root:
427 elif name == root:
428 return ''
428 return ''
429 else:
429 else:
430 # Determine whether `name' is in the hierarchy at or beneath `root',
430 # Determine whether `name' is in the hierarchy at or beneath `root',
431 # by iterating name=dirname(name) until that causes no change (can't
431 # by iterating name=dirname(name) until that causes no change (can't
432 # check name == '/', because that doesn't work on windows). The list
432 # check name == '/', because that doesn't work on windows). The list
433 # `rel' holds the reversed list of components making up the relative
433 # `rel' holds the reversed list of components making up the relative
434 # file name we want.
434 # file name we want.
435 rel = []
435 rel = []
436 while True:
436 while True:
437 try:
437 try:
438 s = util.samefile(name, root)
438 s = util.samefile(name, root)
439 except OSError:
439 except OSError:
440 s = False
440 s = False
441 if s:
441 if s:
442 if not rel:
442 if not rel:
443 # name was actually the same as root (maybe a symlink)
443 # name was actually the same as root (maybe a symlink)
444 return ''
444 return ''
445 rel.reverse()
445 rel.reverse()
446 name = os.path.join(*rel)
446 name = os.path.join(*rel)
447 auditor(name)
447 auditor(name)
448 return util.pconvert(name)
448 return util.pconvert(name)
449 dirname, basename = util.split(name)
449 dirname, basename = util.split(name)
450 rel.append(basename)
450 rel.append(basename)
451 if dirname == name:
451 if dirname == name:
452 break
452 break
453 name = dirname
453 name = dirname
454
454
455 raise util.Abort(_("%s not under root '%s'") % (myname, root))
455 raise util.Abort(_("%s not under root '%s'") % (myname, root))
456
456
457 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
457 def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
458 '''yield every hg repository under path, always recursively.
458 '''yield every hg repository under path, always recursively.
459 The recurse flag will only control recursion into repo working dirs'''
459 The recurse flag will only control recursion into repo working dirs'''
460 def errhandler(err):
460 def errhandler(err):
461 if err.filename == path:
461 if err.filename == path:
462 raise err
462 raise err
463 samestat = getattr(os.path, 'samestat', None)
463 samestat = getattr(os.path, 'samestat', None)
464 if followsym and samestat is not None:
464 if followsym and samestat is not None:
465 def adddir(dirlst, dirname):
465 def adddir(dirlst, dirname):
466 match = False
466 match = False
467 dirstat = os.stat(dirname)
467 dirstat = os.stat(dirname)
468 for lstdirstat in dirlst:
468 for lstdirstat in dirlst:
469 if samestat(dirstat, lstdirstat):
469 if samestat(dirstat, lstdirstat):
470 match = True
470 match = True
471 break
471 break
472 if not match:
472 if not match:
473 dirlst.append(dirstat)
473 dirlst.append(dirstat)
474 return not match
474 return not match
475 else:
475 else:
476 followsym = False
476 followsym = False
477
477
478 if (seen_dirs is None) and followsym:
478 if (seen_dirs is None) and followsym:
479 seen_dirs = []
479 seen_dirs = []
480 adddir(seen_dirs, path)
480 adddir(seen_dirs, path)
481 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
481 for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
482 dirs.sort()
482 dirs.sort()
483 if '.hg' in dirs:
483 if '.hg' in dirs:
484 yield root # found a repository
484 yield root # found a repository
485 qroot = os.path.join(root, '.hg', 'patches')
485 qroot = os.path.join(root, '.hg', 'patches')
486 if os.path.isdir(os.path.join(qroot, '.hg')):
486 if os.path.isdir(os.path.join(qroot, '.hg')):
487 yield qroot # we have a patch queue repo here
487 yield qroot # we have a patch queue repo here
488 if recurse:
488 if recurse:
489 # avoid recursing inside the .hg directory
489 # avoid recursing inside the .hg directory
490 dirs.remove('.hg')
490 dirs.remove('.hg')
491 else:
491 else:
492 dirs[:] = [] # don't descend further
492 dirs[:] = [] # don't descend further
493 elif followsym:
493 elif followsym:
494 newdirs = []
494 newdirs = []
495 for d in dirs:
495 for d in dirs:
496 fname = os.path.join(root, d)
496 fname = os.path.join(root, d)
497 if adddir(seen_dirs, fname):
497 if adddir(seen_dirs, fname):
498 if os.path.islink(fname):
498 if os.path.islink(fname):
499 for hgname in walkrepos(fname, True, seen_dirs):
499 for hgname in walkrepos(fname, True, seen_dirs):
500 yield hgname
500 yield hgname
501 else:
501 else:
502 newdirs.append(d)
502 newdirs.append(d)
503 dirs[:] = newdirs
503 dirs[:] = newdirs
504
504
505 def osrcpath():
505 def osrcpath():
506 '''return default os-specific hgrc search path'''
506 '''return default os-specific hgrc search path'''
507 path = systemrcpath()
507 path = systemrcpath()
508 path.extend(userrcpath())
508 path.extend(userrcpath())
509 path = [os.path.normpath(f) for f in path]
509 path = [os.path.normpath(f) for f in path]
510 return path
510 return path
511
511
512 _rcpath = None
512 _rcpath = None
513
513
514 def rcpath():
514 def rcpath():
515 '''return hgrc search path. if env var HGRCPATH is set, use it.
515 '''return hgrc search path. if env var HGRCPATH is set, use it.
516 for each item in path, if directory, use files ending in .rc,
516 for each item in path, if directory, use files ending in .rc,
517 else use item.
517 else use item.
518 make HGRCPATH empty to only look in .hg/hgrc of current repo.
518 make HGRCPATH empty to only look in .hg/hgrc of current repo.
519 if no HGRCPATH, use default os-specific path.'''
519 if no HGRCPATH, use default os-specific path.'''
520 global _rcpath
520 global _rcpath
521 if _rcpath is None:
521 if _rcpath is None:
522 if 'HGRCPATH' in os.environ:
522 if 'HGRCPATH' in os.environ:
523 _rcpath = []
523 _rcpath = []
524 for p in os.environ['HGRCPATH'].split(os.pathsep):
524 for p in os.environ['HGRCPATH'].split(os.pathsep):
525 if not p:
525 if not p:
526 continue
526 continue
527 p = util.expandpath(p)
527 p = util.expandpath(p)
528 if os.path.isdir(p):
528 if os.path.isdir(p):
529 for f, kind in osutil.listdir(p):
529 for f, kind in osutil.listdir(p):
530 if f.endswith('.rc'):
530 if f.endswith('.rc'):
531 _rcpath.append(os.path.join(p, f))
531 _rcpath.append(os.path.join(p, f))
532 else:
532 else:
533 _rcpath.append(p)
533 _rcpath.append(p)
534 else:
534 else:
535 _rcpath = osrcpath()
535 _rcpath = osrcpath()
536 return _rcpath
536 return _rcpath
537
537
538 if os.name != 'nt':
538 if os.name != 'nt':
539
539
540 def rcfiles(path):
540 def rcfiles(path):
541 rcs = [os.path.join(path, 'hgrc')]
541 rcs = [os.path.join(path, 'hgrc')]
542 rcdir = os.path.join(path, 'hgrc.d')
542 rcdir = os.path.join(path, 'hgrc.d')
543 try:
543 try:
544 rcs.extend([os.path.join(rcdir, f)
544 rcs.extend([os.path.join(rcdir, f)
545 for f, kind in osutil.listdir(rcdir)
545 for f, kind in osutil.listdir(rcdir)
546 if f.endswith(".rc")])
546 if f.endswith(".rc")])
547 except OSError:
547 except OSError:
548 pass
548 pass
549 return rcs
549 return rcs
550
550
551 def systemrcpath():
551 def systemrcpath():
552 path = []
552 path = []
553 if sys.platform == 'plan9':
553 if sys.platform == 'plan9':
554 root = 'lib/mercurial'
554 root = 'lib/mercurial'
555 else:
555 else:
556 root = 'etc/mercurial'
556 root = 'etc/mercurial'
557 # old mod_python does not set sys.argv
557 # old mod_python does not set sys.argv
558 if len(getattr(sys, 'argv', [])) > 0:
558 if len(getattr(sys, 'argv', [])) > 0:
559 p = os.path.dirname(os.path.dirname(sys.argv[0]))
559 p = os.path.dirname(os.path.dirname(sys.argv[0]))
560 path.extend(rcfiles(os.path.join(p, root)))
560 path.extend(rcfiles(os.path.join(p, root)))
561 path.extend(rcfiles('/' + root))
561 path.extend(rcfiles('/' + root))
562 return path
562 return path
563
563
564 def userrcpath():
564 def userrcpath():
565 if sys.platform == 'plan9':
565 if sys.platform == 'plan9':
566 return [os.environ['home'] + '/lib/hgrc']
566 return [os.environ['home'] + '/lib/hgrc']
567 else:
567 else:
568 return [os.path.expanduser('~/.hgrc')]
568 return [os.path.expanduser('~/.hgrc')]
569
569
570 else:
570 else:
571
571
572 import _winreg
572 import _winreg
573
573
574 def systemrcpath():
574 def systemrcpath():
575 '''return default os-specific hgrc search path'''
575 '''return default os-specific hgrc search path'''
576 rcpath = []
576 rcpath = []
577 filename = util.executablepath()
577 filename = util.executablepath()
578 # Use mercurial.ini found in directory with hg.exe
578 # Use mercurial.ini found in directory with hg.exe
579 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
579 progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini')
580 if os.path.isfile(progrc):
580 if os.path.isfile(progrc):
581 rcpath.append(progrc)
581 rcpath.append(progrc)
582 return rcpath
582 return rcpath
583 # Use hgrc.d found in directory with hg.exe
583 # Use hgrc.d found in directory with hg.exe
584 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
584 progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d')
585 if os.path.isdir(progrcd):
585 if os.path.isdir(progrcd):
586 for f, kind in osutil.listdir(progrcd):
586 for f, kind in osutil.listdir(progrcd):
587 if f.endswith('.rc'):
587 if f.endswith('.rc'):
588 rcpath.append(os.path.join(progrcd, f))
588 rcpath.append(os.path.join(progrcd, f))
589 return rcpath
589 return rcpath
590 # else look for a system rcpath in the registry
590 # else look for a system rcpath in the registry
591 value = util.lookupreg('SOFTWARE\\Mercurial', None,
591 value = util.lookupreg('SOFTWARE\\Mercurial', None,
592 _winreg.HKEY_LOCAL_MACHINE)
592 _winreg.HKEY_LOCAL_MACHINE)
593 if not isinstance(value, str) or not value:
593 if not isinstance(value, str) or not value:
594 return rcpath
594 return rcpath
595 value = util.localpath(value)
595 value = util.localpath(value)
596 for p in value.split(os.pathsep):
596 for p in value.split(os.pathsep):
597 if p.lower().endswith('mercurial.ini'):
597 if p.lower().endswith('mercurial.ini'):
598 rcpath.append(p)
598 rcpath.append(p)
599 elif os.path.isdir(p):
599 elif os.path.isdir(p):
600 for f, kind in osutil.listdir(p):
600 for f, kind in osutil.listdir(p):
601 if f.endswith('.rc'):
601 if f.endswith('.rc'):
602 rcpath.append(os.path.join(p, f))
602 rcpath.append(os.path.join(p, f))
603 return rcpath
603 return rcpath
604
604
605 def userrcpath():
605 def userrcpath():
606 '''return os-specific hgrc search path to the user dir'''
606 '''return os-specific hgrc search path to the user dir'''
607 home = os.path.expanduser('~')
607 home = os.path.expanduser('~')
608 path = [os.path.join(home, 'mercurial.ini'),
608 path = [os.path.join(home, 'mercurial.ini'),
609 os.path.join(home, '.hgrc')]
609 os.path.join(home, '.hgrc')]
610 userprofile = os.environ.get('USERPROFILE')
610 userprofile = os.environ.get('USERPROFILE')
611 if userprofile:
611 if userprofile:
612 path.append(os.path.join(userprofile, 'mercurial.ini'))
612 path.append(os.path.join(userprofile, 'mercurial.ini'))
613 path.append(os.path.join(userprofile, '.hgrc'))
613 path.append(os.path.join(userprofile, '.hgrc'))
614 return path
614 return path
615
615
616 def revsingle(repo, revspec, default='.'):
616 def revsingle(repo, revspec, default='.'):
617 if not revspec:
617 if not revspec:
618 return repo[default]
618 return repo[default]
619
619
620 l = revrange(repo, [revspec])
620 l = revrange(repo, [revspec])
621 if len(l) < 1:
621 if len(l) < 1:
622 raise util.Abort(_('empty revision set'))
622 raise util.Abort(_('empty revision set'))
623 return repo[l[-1]]
623 return repo[l[-1]]
624
624
625 def revpair(repo, revs):
625 def revpair(repo, revs):
626 if not revs:
626 if not revs:
627 return repo.dirstate.p1(), None
627 return repo.dirstate.p1(), None
628
628
629 l = revrange(repo, revs)
629 l = revrange(repo, revs)
630
630
631 if len(l) == 0:
631 if len(l) == 0:
632 if revs:
632 if revs:
633 raise util.Abort(_('empty revision range'))
633 raise util.Abort(_('empty revision range'))
634 return repo.dirstate.p1(), None
634 return repo.dirstate.p1(), None
635
635
636 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
636 if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
637 return repo.lookup(l[0]), None
637 return repo.lookup(l[0]), None
638
638
639 return repo.lookup(l[0]), repo.lookup(l[-1])
639 return repo.lookup(l[0]), repo.lookup(l[-1])
640
640
641 _revrangesep = ':'
641 _revrangesep = ':'
642
642
643 def revrange(repo, revs):
643 def revrange(repo, revs):
644 """Yield revision as strings from a list of revision specifications."""
644 """Yield revision as strings from a list of revision specifications."""
645
645
646 def revfix(repo, val, defval):
646 def revfix(repo, val, defval):
647 if not val and val != 0 and defval is not None:
647 if not val and val != 0 and defval is not None:
648 return defval
648 return defval
649 return repo[val].rev()
649 return repo[val].rev()
650
650
651 seen, l = set(), []
651 seen, l = set(), []
652 for spec in revs:
652 for spec in revs:
653 if l and not seen:
653 if l and not seen:
654 seen = set(l)
654 seen = set(l)
655 # attempt to parse old-style ranges first to deal with
655 # attempt to parse old-style ranges first to deal with
656 # things like old-tag which contain query metacharacters
656 # things like old-tag which contain query metacharacters
657 try:
657 try:
658 if isinstance(spec, int):
658 if isinstance(spec, int):
659 seen.add(spec)
659 seen.add(spec)
660 l.append(spec)
660 l.append(spec)
661 continue
661 continue
662
662
663 if _revrangesep in spec:
663 if _revrangesep in spec:
664 start, end = spec.split(_revrangesep, 1)
664 start, end = spec.split(_revrangesep, 1)
665 start = revfix(repo, start, 0)
665 start = revfix(repo, start, 0)
666 end = revfix(repo, end, len(repo) - 1)
666 end = revfix(repo, end, len(repo) - 1)
667 if end == nullrev and start <= 0:
667 if end == nullrev and start <= 0:
668 start = nullrev
668 start = nullrev
669 rangeiter = repo.changelog.revs(start, end)
669 rangeiter = repo.changelog.revs(start, end)
670 if not seen and not l:
670 if not seen and not l:
671 # by far the most common case: revs = ["-1:0"]
671 # by far the most common case: revs = ["-1:0"]
672 l = list(rangeiter)
672 l = list(rangeiter)
673 # defer syncing seen until next iteration
673 # defer syncing seen until next iteration
674 continue
674 continue
675 newrevs = set(rangeiter)
675 newrevs = set(rangeiter)
676 if seen:
676 if seen:
677 newrevs.difference_update(seen)
677 newrevs.difference_update(seen)
678 seen.update(newrevs)
678 seen.update(newrevs)
679 else:
679 else:
680 seen = newrevs
680 seen = newrevs
681 l.extend(sorted(newrevs, reverse=start > end))
681 l.extend(sorted(newrevs, reverse=start > end))
682 continue
682 continue
683 elif spec and spec in repo: # single unquoted rev
683 elif spec and spec in repo: # single unquoted rev
684 rev = revfix(repo, spec, None)
684 rev = revfix(repo, spec, None)
685 if rev in seen:
685 if rev in seen:
686 continue
686 continue
687 seen.add(rev)
687 seen.add(rev)
688 l.append(rev)
688 l.append(rev)
689 continue
689 continue
690 except error.RepoLookupError:
690 except error.RepoLookupError:
691 pass
691 pass
692
692
693 # fall through to new-style queries if old-style fails
693 # fall through to new-style queries if old-style fails
694 m = revset.match(repo.ui, spec)
694 m = revset.match(repo.ui, spec)
695 dl = [r for r in m(repo, list(repo)) if r not in seen]
695 dl = [r for r in m(repo, list(repo)) if r not in seen]
696 l.extend(dl)
696 l.extend(dl)
697 seen.update(dl)
697 seen.update(dl)
698
698
699 return l
699 return l
700
700
701 def expandpats(pats):
701 def expandpats(pats):
702 if not util.expandglobs:
702 if not util.expandglobs:
703 return list(pats)
703 return list(pats)
704 ret = []
704 ret = []
705 for p in pats:
705 for p in pats:
706 kind, name = matchmod._patsplit(p, None)
706 kind, name = matchmod._patsplit(p, None)
707 if kind is None:
707 if kind is None:
708 try:
708 try:
709 globbed = glob.glob(name)
709 globbed = glob.glob(name)
710 except re.error:
710 except re.error:
711 globbed = [name]
711 globbed = [name]
712 if globbed:
712 if globbed:
713 ret.extend(globbed)
713 ret.extend(globbed)
714 continue
714 continue
715 ret.append(p)
715 ret.append(p)
716 return ret
716 return ret
717
717
718 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
718 def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
719 if pats == ("",):
719 if pats == ("",):
720 pats = []
720 pats = []
721 if not globbed and default == 'relpath':
721 if not globbed and default == 'relpath':
722 pats = expandpats(pats or [])
722 pats = expandpats(pats or [])
723
723
724 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
724 m = ctx.match(pats, opts.get('include'), opts.get('exclude'),
725 default)
725 default)
726 def badfn(f, msg):
726 def badfn(f, msg):
727 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
727 ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
728 m.bad = badfn
728 m.bad = badfn
729 return m, pats
729 return m, pats
730
730
731 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
731 def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
732 return matchandpats(ctx, pats, opts, globbed, default)[0]
732 return matchandpats(ctx, pats, opts, globbed, default)[0]
733
733
734 def matchall(repo):
734 def matchall(repo):
735 return matchmod.always(repo.root, repo.getcwd())
735 return matchmod.always(repo.root, repo.getcwd())
736
736
737 def matchfiles(repo, files):
737 def matchfiles(repo, files):
738 return matchmod.exact(repo.root, repo.getcwd(), files)
738 return matchmod.exact(repo.root, repo.getcwd(), files)
739
739
740 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
740 def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
741 if dry_run is None:
741 if dry_run is None:
742 dry_run = opts.get('dry_run')
742 dry_run = opts.get('dry_run')
743 if similarity is None:
743 if similarity is None:
744 similarity = float(opts.get('similarity') or 0)
744 similarity = float(opts.get('similarity') or 0)
745 # we'd use status here, except handling of symlinks and ignore is tricky
745 # we'd use status here, except handling of symlinks and ignore is tricky
746 added, unknown, deleted, removed = [], [], [], []
746 added, unknown, deleted, removed = [], [], [], []
747 audit_path = pathauditor(repo.root)
747 audit_path = pathauditor(repo.root)
748 m = match(repo[None], pats, opts)
748 m = match(repo[None], pats, opts)
749 rejected = []
749 rejected = []
750 m.bad = lambda x, y: rejected.append(x)
750 m.bad = lambda x, y: rejected.append(x)
751
751
752 ctx = repo[None]
752 ctx = repo[None]
753 walkresults = repo.dirstate.walk(m, sorted(ctx.substate), True, False)
753 walkresults = repo.dirstate.walk(m, sorted(ctx.substate), True, False)
754 for abs in sorted(walkresults):
754 for abs in sorted(walkresults):
755 st = walkresults[abs]
755 st = walkresults[abs]
756 dstate = repo.dirstate[abs]
756 dstate = repo.dirstate[abs]
757 if dstate == '?' and audit_path.check(abs):
757 if dstate == '?' and audit_path.check(abs):
758 unknown.append(abs)
758 unknown.append(abs)
759 if repo.ui.verbose or not m.exact(abs):
759 if repo.ui.verbose or not m.exact(abs):
760 rel = m.rel(abs)
760 rel = m.rel(abs)
761 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
761 repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
762 elif (dstate != 'r' and (not st or
762 elif (dstate != 'r' and (not st or
763 (stat.S_ISDIR(st.st_mode) and not stat.S_ISLNK(st.st_mode)))):
763 (stat.S_ISDIR(st.st_mode) and not stat.S_ISLNK(st.st_mode)))):
764 deleted.append(abs)
764 deleted.append(abs)
765 if repo.ui.verbose or not m.exact(abs):
765 if repo.ui.verbose or not m.exact(abs):
766 rel = m.rel(abs)
766 rel = m.rel(abs)
767 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
767 repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
768 # for finding renames
768 # for finding renames
769 elif dstate == 'r':
769 elif dstate == 'r':
770 removed.append(abs)
770 removed.append(abs)
771 elif dstate == 'a':
771 elif dstate == 'a':
772 added.append(abs)
772 added.append(abs)
773 copies = {}
773 copies = {}
774 if similarity > 0:
774 if similarity > 0:
775 for old, new, score in similar.findrenames(repo,
775 for old, new, score in similar.findrenames(repo,
776 added + unknown, removed + deleted, similarity):
776 added + unknown, removed + deleted, similarity):
777 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
777 if repo.ui.verbose or not m.exact(old) or not m.exact(new):
778 repo.ui.status(_('recording removal of %s as rename to %s '
778 repo.ui.status(_('recording removal of %s as rename to %s '
779 '(%d%% similar)\n') %
779 '(%d%% similar)\n') %
780 (m.rel(old), m.rel(new), score * 100))
780 (m.rel(old), m.rel(new), score * 100))
781 copies[new] = old
781 copies[new] = old
782
782
783 if not dry_run:
783 if not dry_run:
784 wctx = repo[None]
784 wctx = repo[None]
785 wlock = repo.wlock()
785 wlock = repo.wlock()
786 try:
786 try:
787 wctx.forget(deleted)
787 wctx.forget(deleted)
788 wctx.add(unknown)
788 wctx.add(unknown)
789 for new, old in copies.iteritems():
789 for new, old in copies.iteritems():
790 wctx.copy(old, new)
790 wctx.copy(old, new)
791 finally:
791 finally:
792 wlock.release()
792 wlock.release()
793
793
794 for f in rejected:
794 for f in rejected:
795 if f in m.files():
795 if f in m.files():
796 return 1
796 return 1
797 return 0
797 return 0
798
798
799 def updatedir(ui, repo, patches, similarity=0):
799 def updatedir(ui, repo, patches, similarity=0):
800 '''Update dirstate after patch application according to metadata'''
800 '''Update dirstate after patch application according to metadata'''
801 if not patches:
801 if not patches:
802 return []
802 return []
803 copies = []
803 copies = []
804 removes = set()
804 removes = set()
805 cfiles = patches.keys()
805 cfiles = patches.keys()
806 cwd = repo.getcwd()
806 cwd = repo.getcwd()
807 if cwd:
807 if cwd:
808 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
808 cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()]
809 for f in patches:
809 for f in patches:
810 gp = patches[f]
810 gp = patches[f]
811 if not gp:
811 if not gp:
812 continue
812 continue
813 if gp.op == 'RENAME':
813 if gp.op == 'RENAME':
814 copies.append((gp.oldpath, gp.path))
814 copies.append((gp.oldpath, gp.path))
815 removes.add(gp.oldpath)
815 removes.add(gp.oldpath)
816 elif gp.op == 'COPY':
816 elif gp.op == 'COPY':
817 copies.append((gp.oldpath, gp.path))
817 copies.append((gp.oldpath, gp.path))
818 elif gp.op == 'DELETE':
818 elif gp.op == 'DELETE':
819 removes.add(gp.path)
819 removes.add(gp.path)
820
820
821 wctx = repo[None]
821 wctx = repo[None]
822 for src, dst in copies:
822 for src, dst in copies:
823 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
823 dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd)
824 if (not similarity) and removes:
824 if (not similarity) and removes:
825 wctx.remove(sorted(removes), True)
825 wctx.remove(sorted(removes), True)
826
826
827 for f in patches:
827 for f in patches:
828 gp = patches[f]
828 gp = patches[f]
829 if gp and gp.mode:
829 if gp and gp.mode:
830 islink, isexec = gp.mode
830 islink, isexec = gp.mode
831 dst = repo.wjoin(gp.path)
831 dst = repo.wjoin(gp.path)
832 # patch won't create empty files
832 # patch won't create empty files
833 if gp.op == 'ADD' and not os.path.lexists(dst):
833 if gp.op == 'ADD' and not os.path.lexists(dst):
834 flags = (isexec and 'x' or '') + (islink and 'l' or '')
834 flags = (isexec and 'x' or '') + (islink and 'l' or '')
835 repo.wwrite(gp.path, '', flags)
835 repo.wwrite(gp.path, '', flags)
836 util.setflags(dst, islink, isexec)
836 util.setflags(dst, islink, isexec)
837 addremove(repo, cfiles, similarity=similarity)
837 addremove(repo, cfiles, similarity=similarity)
838 files = patches.keys()
838 files = patches.keys()
839 files.extend([r for r in removes if r not in files])
839 files.extend([r for r in removes if r not in files])
840 return sorted(files)
840 return sorted(files)
841
841
842 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
842 def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None):
843 """Update the dirstate to reflect the intent of copying src to dst. For
843 """Update the dirstate to reflect the intent of copying src to dst. For
844 different reasons it might not end with dst being marked as copied from src.
844 different reasons it might not end with dst being marked as copied from src.
845 """
845 """
846 origsrc = repo.dirstate.copied(src) or src
846 origsrc = repo.dirstate.copied(src) or src
847 if dst == origsrc: # copying back a copy?
847 if dst == origsrc: # copying back a copy?
848 if repo.dirstate[dst] not in 'mn' and not dryrun:
848 if repo.dirstate[dst] not in 'mn' and not dryrun:
849 repo.dirstate.normallookup(dst)
849 repo.dirstate.normallookup(dst)
850 else:
850 else:
851 if repo.dirstate[origsrc] == 'a' and origsrc == src:
851 if repo.dirstate[origsrc] == 'a' and origsrc == src:
852 if not ui.quiet:
852 if not ui.quiet:
853 ui.warn(_("%s has not been committed yet, so no copy "
853 ui.warn(_("%s has not been committed yet, so no copy "
854 "data will be stored for %s.\n")
854 "data will be stored for %s.\n")
855 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
855 % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd)))
856 if repo.dirstate[dst] in '?r' and not dryrun:
856 if repo.dirstate[dst] in '?r' and not dryrun:
857 wctx.add([dst])
857 wctx.add([dst])
858 elif not dryrun:
858 elif not dryrun:
859 wctx.copy(origsrc, dst)
859 wctx.copy(origsrc, dst)
860
860
861 def readrequires(opener, supported):
861 def readrequires(opener, supported):
862 '''Reads and parses .hg/requires and checks if all entries found
862 '''Reads and parses .hg/requires and checks if all entries found
863 are in the list of supported features.'''
863 are in the list of supported features.'''
864 requirements = set(opener.read("requires").splitlines())
864 requirements = set(opener.read("requires").splitlines())
865 missings = []
865 missings = []
866 for r in requirements:
866 for r in requirements:
867 if r not in supported:
867 if r not in supported:
868 if not r or not r[0].isalnum():
868 if not r or not r[0].isalnum():
869 raise error.RequirementError(_(".hg/requires file is corrupt"))
869 raise error.RequirementError(_(".hg/requires file is corrupt"))
870 missings.append(r)
870 missings.append(r)
871 missings.sort()
871 missings.sort()
872 if missings:
872 if missings:
873 raise error.RequirementError(
873 raise error.RequirementError(
874 _("unknown repository format: requires features '%s' (upgrade "
874 _("unknown repository format: requires features '%s' (upgrade "
875 "Mercurial)") % "', '".join(missings))
875 "Mercurial)") % "', '".join(missings))
876 return requirements
876 return requirements
877
877
878 class filecacheentry(object):
878 class filecacheentry(object):
879 def __init__(self, path, stat=True):
879 def __init__(self, path, stat=True):
880 self.path = path
880 self.path = path
881 self.cachestat = None
881 self.cachestat = None
882 self._cacheable = None
882 self._cacheable = None
883
883
884 if stat:
884 if stat:
885 self.cachestat = filecacheentry.stat(self.path)
885 self.cachestat = filecacheentry.stat(self.path)
886
886
887 if self.cachestat:
887 if self.cachestat:
888 self._cacheable = self.cachestat.cacheable()
888 self._cacheable = self.cachestat.cacheable()
889 else:
889 else:
890 # None means we don't know yet
890 # None means we don't know yet
891 self._cacheable = None
891 self._cacheable = None
892
892
893 def refresh(self):
893 def refresh(self):
894 if self.cacheable():
894 if self.cacheable():
895 self.cachestat = filecacheentry.stat(self.path)
895 self.cachestat = filecacheentry.stat(self.path)
896
896
897 def cacheable(self):
897 def cacheable(self):
898 if self._cacheable is not None:
898 if self._cacheable is not None:
899 return self._cacheable
899 return self._cacheable
900
900
901 # we don't know yet, assume it is for now
901 # we don't know yet, assume it is for now
902 return True
902 return True
903
903
904 def changed(self):
904 def changed(self):
905 # no point in going further if we can't cache it
905 # no point in going further if we can't cache it
906 if not self.cacheable():
906 if not self.cacheable():
907 return True
907 return True
908
908
909 newstat = filecacheentry.stat(self.path)
909 newstat = filecacheentry.stat(self.path)
910
910
911 # we may not know if it's cacheable yet, check again now
911 # we may not know if it's cacheable yet, check again now
912 if newstat and self._cacheable is None:
912 if newstat and self._cacheable is None:
913 self._cacheable = newstat.cacheable()
913 self._cacheable = newstat.cacheable()
914
914
915 # check again
915 # check again
916 if not self._cacheable:
916 if not self._cacheable:
917 return True
917 return True
918
918
919 if self.cachestat != newstat:
919 if self.cachestat != newstat:
920 self.cachestat = newstat
920 self.cachestat = newstat
921 return True
921 return True
922 else:
922 else:
923 return False
923 return False
924
924
925 @staticmethod
925 @staticmethod
926 def stat(path):
926 def stat(path):
927 try:
927 try:
928 return util.cachestat(path)
928 return util.cachestat(path)
929 except OSError, e:
929 except OSError, e:
930 if e.errno != errno.ENOENT:
930 if e.errno != errno.ENOENT:
931 raise
931 raise
932
932
933 class filecache(object):
933 class filecache(object):
934 '''A property like decorator that tracks a file under .hg/ for updates.
934 '''A property like decorator that tracks a file under .hg/ for updates.
935
935
936 Records stat info when called in _filecache.
936 Records stat info when called in _filecache.
937
937
938 On subsequent calls, compares old stat info with new info, and recreates
938 On subsequent calls, compares old stat info with new info, and recreates
939 the object when needed, updating the new stat info in _filecache.
939 the object when needed, updating the new stat info in _filecache.
940
940
941 Mercurial either atomic renames or appends for files under .hg,
941 Mercurial either atomic renames or appends for files under .hg,
942 so to ensure the cache is reliable we need the filesystem to be able
942 so to ensure the cache is reliable we need the filesystem to be able
943 to tell us if a file has been replaced. If it can't, we fallback to
943 to tell us if a file has been replaced. If it can't, we fallback to
944 recreating the object on every call (essentially the same behaviour as
944 recreating the object on every call (essentially the same behaviour as
945 propertycache).'''
945 propertycache).'''
946 def __init__(self, path):
946 def __init__(self, path):
947 self.path = path
947 self.path = path
948
948
949 def join(self, obj, fname):
949 def join(self, obj, fname):
950 """Used to compute the runtime path of the cached file.
950 """Used to compute the runtime path of the cached file.
951
951
952 Users should subclass filecache and provide their own version of this
952 Users should subclass filecache and provide their own version of this
953 function to call the appropriate join function on 'obj' (an instance
953 function to call the appropriate join function on 'obj' (an instance
954 of the class that its member function was decorated).
954 of the class that its member function was decorated).
955 """
955 """
956 return obj.join(fname)
956 return obj.join(fname)
957
957
958 def __call__(self, func):
958 def __call__(self, func):
959 self.func = func
959 self.func = func
960 self.name = func.__name__
960 self.name = func.__name__
961 return self
961 return self
962
962
963 def __get__(self, obj, type=None):
963 def __get__(self, obj, type=None):
964 # do we need to check if the file changed?
964 # do we need to check if the file changed?
965 if self.name in obj.__dict__:
965 if self.name in obj.__dict__:
966 assert self.name in obj._filecache, self.name
966 assert self.name in obj._filecache, self.name
967 return obj.__dict__[self.name]
967 return obj.__dict__[self.name]
968
968
969 entry = obj._filecache.get(self.name)
969 entry = obj._filecache.get(self.name)
970
970
971 if entry:
971 if entry:
972 if entry.changed():
972 if entry.changed():
973 entry.obj = self.func(obj)
973 entry.obj = self.func(obj)
974 else:
974 else:
975 path = self.join(obj, self.path)
975 path = self.join(obj, self.path)
976
976
977 # We stat -before- creating the object so our cache doesn't lie if
977 # We stat -before- creating the object so our cache doesn't lie if
978 # a writer modified between the time we read and stat
978 # a writer modified between the time we read and stat
979 entry = filecacheentry(path)
979 entry = filecacheentry(path)
980 entry.obj = self.func(obj)
980 entry.obj = self.func(obj)
981
981
982 obj._filecache[self.name] = entry
982 obj._filecache[self.name] = entry
983
983
984 obj.__dict__[self.name] = entry.obj
984 obj.__dict__[self.name] = entry.obj
985 return entry.obj
985 return entry.obj
986
986
987 def __set__(self, obj, value):
987 def __set__(self, obj, value):
988 if self.name not in obj._filecache:
988 if self.name not in obj._filecache:
989 # we add an entry for the missing value because X in __dict__
989 # we add an entry for the missing value because X in __dict__
990 # implies X in _filecache
990 # implies X in _filecache
991 ce = filecacheentry(self.join(obj, self.path), False)
991 ce = filecacheentry(self.join(obj, self.path), False)
992 obj._filecache[self.name] = ce
992 obj._filecache[self.name] = ce
993 else:
993 else:
994 ce = obj._filecache[self.name]
994 ce = obj._filecache[self.name]
995
995
996 ce.obj = value # update cached copy
996 ce.obj = value # update cached copy
997 obj.__dict__[self.name] = value # update copy returned by obj.x
997 obj.__dict__[self.name] = value # update copy returned by obj.x
998
998
999 def __delete__(self, obj):
999 def __delete__(self, obj):
1000 try:
1000 try:
1001 del obj.__dict__[self.name]
1001 del obj.__dict__[self.name]
1002 except KeyError:
1002 except KeyError:
1003 raise AttributeError(self.name)
1003 raise AttributeError(self.name)
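For orientation, here is a minimal sketch of how a filecache property is typically wired up. The fakerepo class, the '/tmp/repo/.hg' path and the 'bookmarks' file name are hypothetical stand-ins, not part of this patch; the only things the decorator expects from its host object are a _filecache dict and a join() method.

    import os
    from mercurial import scmutil, util

    class fakerepo(object):
        '''hypothetical host object satisfying filecache's expectations'''
        def __init__(self, hgdir):
            self._hgdir = hgdir
            self._filecache = {}        # filecache keeps its stat entries here

        def join(self, fname):          # used by filecache.join() by default
            return os.path.join(self._hgdir, fname)

        @scmutil.filecache('bookmarks')
        def bookmarks(self):
            # recomputed only when the backing file's stat info changes
            return util.readfile(self.join('bookmarks')).splitlines()

    repo = fakerepo('/tmp/repo/.hg')
    repo.bookmarks        # first access reads the file and caches the result
    repo.bookmarks        # later accesses come from the cache until the cached
                          # attribute is dropped and changed() says to re-read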
@@ -1,1854 +1,1864 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specific implementations.
10 """Mercurial utility functions and platform specific implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding, collections
17 import error, osutil, encoding, collections
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, datetime, calendar, textwrap, signal
19 import os, time, datetime, calendar, textwrap, signal
20 import imp, socket, urllib
20 import imp, socket, urllib
21
21
22 if os.name == 'nt':
22 if os.name == 'nt':
23 import windows as platform
23 import windows as platform
24 else:
24 else:
25 import posix as platform
25 import posix as platform
26
26
27 cachestat = platform.cachestat
27 cachestat = platform.cachestat
28 checkexec = platform.checkexec
28 checkexec = platform.checkexec
29 checklink = platform.checklink
29 checklink = platform.checklink
30 copymode = platform.copymode
30 copymode = platform.copymode
31 executablepath = platform.executablepath
31 executablepath = platform.executablepath
32 expandglobs = platform.expandglobs
32 expandglobs = platform.expandglobs
33 explainexit = platform.explainexit
33 explainexit = platform.explainexit
34 findexe = platform.findexe
34 findexe = platform.findexe
35 gethgcmd = platform.gethgcmd
35 gethgcmd = platform.gethgcmd
36 getuser = platform.getuser
36 getuser = platform.getuser
37 groupmembers = platform.groupmembers
37 groupmembers = platform.groupmembers
38 groupname = platform.groupname
38 groupname = platform.groupname
39 hidewindow = platform.hidewindow
39 hidewindow = platform.hidewindow
40 isexec = platform.isexec
40 isexec = platform.isexec
41 isowner = platform.isowner
41 isowner = platform.isowner
42 localpath = platform.localpath
42 localpath = platform.localpath
43 lookupreg = platform.lookupreg
43 lookupreg = platform.lookupreg
44 makedir = platform.makedir
44 makedir = platform.makedir
45 nlinks = platform.nlinks
45 nlinks = platform.nlinks
46 normpath = platform.normpath
46 normpath = platform.normpath
47 normcase = platform.normcase
47 normcase = platform.normcase
48 openhardlinks = platform.openhardlinks
48 openhardlinks = platform.openhardlinks
49 oslink = platform.oslink
49 oslink = platform.oslink
50 parsepatchoutput = platform.parsepatchoutput
50 parsepatchoutput = platform.parsepatchoutput
51 pconvert = platform.pconvert
51 pconvert = platform.pconvert
52 popen = platform.popen
52 popen = platform.popen
53 posixfile = platform.posixfile
53 posixfile = platform.posixfile
54 quotecommand = platform.quotecommand
54 quotecommand = platform.quotecommand
55 realpath = platform.realpath
55 realpath = platform.realpath
56 rename = platform.rename
56 rename = platform.rename
57 samedevice = platform.samedevice
57 samedevice = platform.samedevice
58 samefile = platform.samefile
58 samefile = platform.samefile
59 samestat = platform.samestat
59 samestat = platform.samestat
60 setbinary = platform.setbinary
60 setbinary = platform.setbinary
61 setflags = platform.setflags
61 setflags = platform.setflags
62 setsignalhandler = platform.setsignalhandler
62 setsignalhandler = platform.setsignalhandler
63 shellquote = platform.shellquote
63 shellquote = platform.shellquote
64 spawndetached = platform.spawndetached
64 spawndetached = platform.spawndetached
65 split = platform.split
65 split = platform.split
66 sshargs = platform.sshargs
66 sshargs = platform.sshargs
67 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
67 statfiles = getattr(osutil, 'statfiles', platform.statfiles)
68 termwidth = platform.termwidth
68 termwidth = platform.termwidth
69 testpid = platform.testpid
69 testpid = platform.testpid
70 umask = platform.umask
70 umask = platform.umask
71 unlink = platform.unlink
71 unlink = platform.unlink
72 unlinkpath = platform.unlinkpath
72 unlinkpath = platform.unlinkpath
73 username = platform.username
73 username = platform.username
74
74
75 # Python compatibility
75 # Python compatibility
76
76
77 _notset = object()
77 _notset = object()
78
78
79 def safehasattr(thing, attr):
79 def safehasattr(thing, attr):
80 return getattr(thing, attr, _notset) is not _notset
80 return getattr(thing, attr, _notset) is not _notset
81
81
82 def sha1(s=''):
82 def sha1(s=''):
83 '''
83 '''
84 Low-overhead wrapper around Python's SHA support
84 Low-overhead wrapper around Python's SHA support
85
85
86 >>> f = _fastsha1
86 >>> f = _fastsha1
87 >>> a = sha1()
87 >>> a = sha1()
88 >>> a = f()
88 >>> a = f()
89 >>> a.hexdigest()
89 >>> a.hexdigest()
90 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
90 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
91 '''
91 '''
92
92
93 return _fastsha1(s)
93 return _fastsha1(s)
94
94
95 def _fastsha1(s=''):
95 def _fastsha1(s=''):
96 # This function will import sha1 from hashlib or sha (whichever is
96 # This function will import sha1 from hashlib or sha (whichever is
97 # available) and overwrite itself with it on the first call.
97 # available) and overwrite itself with it on the first call.
98 # Subsequent calls will go directly to the imported function.
98 # Subsequent calls will go directly to the imported function.
99 if sys.version_info >= (2, 5):
99 if sys.version_info >= (2, 5):
100 from hashlib import sha1 as _sha1
100 from hashlib import sha1 as _sha1
101 else:
101 else:
102 from sha import sha as _sha1
102 from sha import sha as _sha1
103 global _fastsha1, sha1
103 global _fastsha1, sha1
104 _fastsha1 = sha1 = _sha1
104 _fastsha1 = sha1 = _sha1
105 return _sha1(s)
105 return _sha1(s)
106
106
107 try:
107 try:
108 buffer = buffer
108 buffer = buffer
109 except NameError:
109 except NameError:
110 if sys.version_info[0] < 3:
110 if sys.version_info[0] < 3:
111 def buffer(sliceable, offset=0):
111 def buffer(sliceable, offset=0):
112 return sliceable[offset:]
112 return sliceable[offset:]
113 else:
113 else:
114 def buffer(sliceable, offset=0):
114 def buffer(sliceable, offset=0):
115 return memoryview(sliceable)[offset:]
115 return memoryview(sliceable)[offset:]
116
116
117 import subprocess
117 import subprocess
118 closefds = os.name == 'posix'
118 closefds = os.name == 'posix'
119
119
120 def popen2(cmd, env=None, newlines=False):
120 def popen2(cmd, env=None, newlines=False):
121 # Setting bufsize to -1 lets the system decide the buffer size.
121 # Setting bufsize to -1 lets the system decide the buffer size.
122 # The default for bufsize is 0, meaning unbuffered. This leads to
122 # The default for bufsize is 0, meaning unbuffered. This leads to
123 # poor performance on Mac OS X: http://bugs.python.org/issue4194
123 # poor performance on Mac OS X: http://bugs.python.org/issue4194
124 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
124 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
125 close_fds=closefds,
125 close_fds=closefds,
126 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
126 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
127 universal_newlines=newlines,
127 universal_newlines=newlines,
128 env=env)
128 env=env)
129 return p.stdin, p.stdout
129 return p.stdin, p.stdout
130
130
131 def popen3(cmd, env=None, newlines=False):
131 def popen3(cmd, env=None, newlines=False):
132 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
132 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
133 close_fds=closefds,
133 close_fds=closefds,
134 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
134 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
135 stderr=subprocess.PIPE,
135 stderr=subprocess.PIPE,
136 universal_newlines=newlines,
136 universal_newlines=newlines,
137 env=env)
137 env=env)
138 return p.stdin, p.stdout, p.stderr
138 return p.stdin, p.stdout, p.stderr
139
139
140 def version():
140 def version():
141 """Return version information if available."""
141 """Return version information if available."""
142 try:
142 try:
143 import __version__
143 import __version__
144 return __version__.version
144 return __version__.version
145 except ImportError:
145 except ImportError:
146 return 'unknown'
146 return 'unknown'
147
147
148 # used by parsedate
148 # used by parsedate
149 defaultdateformats = (
149 defaultdateformats = (
150 '%Y-%m-%d %H:%M:%S',
150 '%Y-%m-%d %H:%M:%S',
151 '%Y-%m-%d %I:%M:%S%p',
151 '%Y-%m-%d %I:%M:%S%p',
152 '%Y-%m-%d %H:%M',
152 '%Y-%m-%d %H:%M',
153 '%Y-%m-%d %I:%M%p',
153 '%Y-%m-%d %I:%M%p',
154 '%Y-%m-%d',
154 '%Y-%m-%d',
155 '%m-%d',
155 '%m-%d',
156 '%m/%d',
156 '%m/%d',
157 '%m/%d/%y',
157 '%m/%d/%y',
158 '%m/%d/%Y',
158 '%m/%d/%Y',
159 '%a %b %d %H:%M:%S %Y',
159 '%a %b %d %H:%M:%S %Y',
160 '%a %b %d %I:%M:%S%p %Y',
160 '%a %b %d %I:%M:%S%p %Y',
161 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
161 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
162 '%b %d %H:%M:%S %Y',
162 '%b %d %H:%M:%S %Y',
163 '%b %d %I:%M:%S%p %Y',
163 '%b %d %I:%M:%S%p %Y',
164 '%b %d %H:%M:%S',
164 '%b %d %H:%M:%S',
165 '%b %d %I:%M:%S%p',
165 '%b %d %I:%M:%S%p',
166 '%b %d %H:%M',
166 '%b %d %H:%M',
167 '%b %d %I:%M%p',
167 '%b %d %I:%M%p',
168 '%b %d %Y',
168 '%b %d %Y',
169 '%b %d',
169 '%b %d',
170 '%H:%M:%S',
170 '%H:%M:%S',
171 '%I:%M:%S%p',
171 '%I:%M:%S%p',
172 '%H:%M',
172 '%H:%M',
173 '%I:%M%p',
173 '%I:%M%p',
174 )
174 )
175
175
176 extendeddateformats = defaultdateformats + (
176 extendeddateformats = defaultdateformats + (
177 "%Y",
177 "%Y",
178 "%Y-%m",
178 "%Y-%m",
179 "%b",
179 "%b",
180 "%b %Y",
180 "%b %Y",
181 )
181 )
182
182
183 def cachefunc(func):
183 def cachefunc(func):
184 '''cache the result of function calls'''
184 '''cache the result of function calls'''
185 # XXX doesn't handle keywords args
185 # XXX doesn't handle keywords args
186 cache = {}
186 cache = {}
187 if func.func_code.co_argcount == 1:
187 if func.func_code.co_argcount == 1:
188 # we gain a small amount of time because
188 # we gain a small amount of time because
189 # we don't need to pack/unpack the list
189 # we don't need to pack/unpack the list
190 def f(arg):
190 def f(arg):
191 if arg not in cache:
191 if arg not in cache:
192 cache[arg] = func(arg)
192 cache[arg] = func(arg)
193 return cache[arg]
193 return cache[arg]
194 else:
194 else:
195 def f(*args):
195 def f(*args):
196 if args not in cache:
196 if args not in cache:
197 cache[args] = func(*args)
197 cache[args] = func(*args)
198 return cache[args]
198 return cache[args]
199
199
200 return f
200 return f
201
201
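A small illustration of cachefunc's behaviour (the square helper below is made up for the example):

    calls = []

    def square(x):
        calls.append(x)            # record how often the real function runs
        return x * x

    cached = cachefunc(square)
    cached(3), cached(3), cached(4)
    assert calls == [3, 4]         # the repeated call came from the cache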
202 try:
202 try:
203 collections.deque.remove
203 collections.deque.remove
204 deque = collections.deque
204 deque = collections.deque
205 except AttributeError:
205 except AttributeError:
206 # python 2.4 lacks deque.remove
206 # python 2.4 lacks deque.remove
207 class deque(collections.deque):
207 class deque(collections.deque):
208 def remove(self, val):
208 def remove(self, val):
209 for i, v in enumerate(self):
209 for i, v in enumerate(self):
210 if v == val:
210 if v == val:
211 del self[i]
211 del self[i]
212 break
212 break
213
213
214 class lrucachedict(object):
214 class lrucachedict(object):
215 '''cache most recent gets from or sets to this dictionary'''
215 '''cache most recent gets from or sets to this dictionary'''
216 def __init__(self, maxsize):
216 def __init__(self, maxsize):
217 self._cache = {}
217 self._cache = {}
218 self._maxsize = maxsize
218 self._maxsize = maxsize
219 self._order = deque()
219 self._order = deque()
220
220
221 def __getitem__(self, key):
221 def __getitem__(self, key):
222 value = self._cache[key]
222 value = self._cache[key]
223 self._order.remove(key)
223 self._order.remove(key)
224 self._order.append(key)
224 self._order.append(key)
225 return value
225 return value
226
226
227 def __setitem__(self, key, value):
227 def __setitem__(self, key, value):
228 if key not in self._cache:
228 if key not in self._cache:
229 if len(self._cache) >= self._maxsize:
229 if len(self._cache) >= self._maxsize:
230 del self._cache[self._order.popleft()]
230 del self._cache[self._order.popleft()]
231 else:
231 else:
232 self._order.remove(key)
232 self._order.remove(key)
233 self._cache[key] = value
233 self._cache[key] = value
234 self._order.append(key)
234 self._order.append(key)
235
235
236 def __contains__(self, key):
236 def __contains__(self, key):
237 return key in self._cache
237 return key in self._cache
238
238
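To make the eviction order above concrete, a tiny usage example (the keys and values are arbitrary):

    d = lrucachedict(2)
    d['a'] = 1
    d['b'] = 2
    d['a']                 # touching 'a' makes it the most recently used
    d['c'] = 3             # evicts 'b', the least recently used entry
    assert 'a' in d and 'c' in d and 'b' not in d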
239 def lrucachefunc(func):
239 def lrucachefunc(func):
240 '''cache most recent results of function calls'''
240 '''cache most recent results of function calls'''
241 cache = {}
241 cache = {}
242 order = deque()
242 order = deque()
243 if func.func_code.co_argcount == 1:
243 if func.func_code.co_argcount == 1:
244 def f(arg):
244 def f(arg):
245 if arg not in cache:
245 if arg not in cache:
246 if len(cache) > 20:
246 if len(cache) > 20:
247 del cache[order.popleft()]
247 del cache[order.popleft()]
248 cache[arg] = func(arg)
248 cache[arg] = func(arg)
249 else:
249 else:
250 order.remove(arg)
250 order.remove(arg)
251 order.append(arg)
251 order.append(arg)
252 return cache[arg]
252 return cache[arg]
253 else:
253 else:
254 def f(*args):
254 def f(*args):
255 if args not in cache:
255 if args not in cache:
256 if len(cache) > 20:
256 if len(cache) > 20:
257 del cache[order.popleft()]
257 del cache[order.popleft()]
258 cache[args] = func(*args)
258 cache[args] = func(*args)
259 else:
259 else:
260 order.remove(args)
260 order.remove(args)
261 order.append(args)
261 order.append(args)
262 return cache[args]
262 return cache[args]
263
263
264 return f
264 return f
265
265
266 class propertycache(object):
266 class propertycache(object):
267 def __init__(self, func):
267 def __init__(self, func):
268 self.func = func
268 self.func = func
269 self.name = func.__name__
269 self.name = func.__name__
270 def __get__(self, obj, type=None):
270 def __get__(self, obj, type=None):
271 result = self.func(obj)
271 result = self.func(obj)
272 self.cachevalue(obj, result)
272 self.cachevalue(obj, result)
273 return result
273 return result
274
274
275 def cachevalue(self, obj, value):
275 def cachevalue(self, obj, value):
276 setattr(obj, self.name, value)
276 setattr(obj, self.name, value)
277
277
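propertycache computes a value once per instance and then parks it in the instance __dict__, so the descriptor is never consulted again for that object. A quick hypothetical example:

    class thing(object):
        @propertycache
        def answer(self):
            print 'computing'          # printed only once per instance
            return 42

    t = thing()
    t.answer                           # prints 'computing', returns 42
    t.answer                           # served straight from t.__dict__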
278 def pipefilter(s, cmd):
278 def pipefilter(s, cmd):
279 '''filter string S through command CMD, returning its output'''
279 '''filter string S through command CMD, returning its output'''
280 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
280 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
281 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
281 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
282 pout, perr = p.communicate(s)
282 pout, perr = p.communicate(s)
283 return pout
283 return pout
284
284
285 def tempfilter(s, cmd):
285 def tempfilter(s, cmd):
286 '''filter string S through a pair of temporary files with CMD.
286 '''filter string S through a pair of temporary files with CMD.
287 CMD is used as a template to create the real command to be run,
287 CMD is used as a template to create the real command to be run,
288 with the strings INFILE and OUTFILE replaced by the real names of
288 with the strings INFILE and OUTFILE replaced by the real names of
289 the temporary files generated.'''
289 the temporary files generated.'''
290 inname, outname = None, None
290 inname, outname = None, None
291 try:
291 try:
292 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
292 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
293 fp = os.fdopen(infd, 'wb')
293 fp = os.fdopen(infd, 'wb')
294 fp.write(s)
294 fp.write(s)
295 fp.close()
295 fp.close()
296 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
296 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
297 os.close(outfd)
297 os.close(outfd)
298 cmd = cmd.replace('INFILE', inname)
298 cmd = cmd.replace('INFILE', inname)
299 cmd = cmd.replace('OUTFILE', outname)
299 cmd = cmd.replace('OUTFILE', outname)
300 code = os.system(cmd)
300 code = os.system(cmd)
301 if sys.platform == 'OpenVMS' and code & 1:
301 if sys.platform == 'OpenVMS' and code & 1:
302 code = 0
302 code = 0
303 if code:
303 if code:
304 raise Abort(_("command '%s' failed: %s") %
304 raise Abort(_("command '%s' failed: %s") %
305 (cmd, explainexit(code)))
305 (cmd, explainexit(code)))
306 fp = open(outname, 'rb')
306 fp = open(outname, 'rb')
307 r = fp.read()
307 r = fp.read()
308 fp.close()
308 fp.close()
309 return r
309 return r
310 finally:
310 finally:
311 try:
311 try:
312 if inname:
312 if inname:
313 os.unlink(inname)
313 os.unlink(inname)
314 except OSError:
314 except OSError:
315 pass
315 pass
316 try:
316 try:
317 if outname:
317 if outname:
318 os.unlink(outname)
318 os.unlink(outname)
319 except OSError:
319 except OSError:
320 pass
320 pass
321
321
322 filtertable = {
322 filtertable = {
323 'tempfile:': tempfilter,
323 'tempfile:': tempfilter,
324 'pipe:': pipefilter,
324 'pipe:': pipefilter,
325 }
325 }
326
326
327 def filter(s, cmd):
327 def filter(s, cmd):
328 "filter a string through a command that transforms its input to its output"
328 "filter a string through a command that transforms its input to its output"
329 for name, fn in filtertable.iteritems():
329 for name, fn in filtertable.iteritems():
330 if cmd.startswith(name):
330 if cmd.startswith(name):
331 return fn(s, cmd[len(name):].lstrip())
331 return fn(s, cmd[len(name):].lstrip())
332 return pipefilter(s, cmd)
332 return pipefilter(s, cmd)
333
333
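A rough illustration of the three spellings accepted by filter(), assuming a POSIX shell with tr available; each call prints 'SOME TEXT':

    print filter('some text\n', 'pipe: tr a-z A-Z')                       # piped directly
    print filter('some text\n', 'tempfile: tr a-z A-Z <INFILE >OUTFILE')  # via temp files
    print filter('some text\n', 'tr a-z A-Z')                             # no prefix: pipe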
334 def binary(s):
334 def binary(s):
335 """return true if a string is binary data"""
335 """return true if a string is binary data"""
336 return bool(s and '\0' in s)
336 return bool(s and '\0' in s)
337
337
338 def increasingchunks(source, min=1024, max=65536):
338 def increasingchunks(source, min=1024, max=65536):
339 '''return no less than min bytes per chunk while data remains,
339 '''return no less than min bytes per chunk while data remains,
340 doubling min after each chunk until it reaches max'''
340 doubling min after each chunk until it reaches max'''
341 def log2(x):
341 def log2(x):
342 if not x:
342 if not x:
343 return 0
343 return 0
344 i = 0
344 i = 0
345 while x:
345 while x:
346 x >>= 1
346 x >>= 1
347 i += 1
347 i += 1
348 return i - 1
348 return i - 1
349
349
350 buf = []
350 buf = []
351 blen = 0
351 blen = 0
352 for chunk in source:
352 for chunk in source:
353 buf.append(chunk)
353 buf.append(chunk)
354 blen += len(chunk)
354 blen += len(chunk)
355 if blen >= min:
355 if blen >= min:
356 if min < max:
356 if min < max:
357 min = min << 1
357 min = min << 1
358 nmin = 1 << log2(blen)
358 nmin = 1 << log2(blen)
359 if nmin > min:
359 if nmin > min:
360 min = nmin
360 min = nmin
361 if min > max:
361 if min > max:
362 min = max
362 min = max
363 yield ''.join(buf)
363 yield ''.join(buf)
364 blen = 0
364 blen = 0
365 buf = []
365 buf = []
366 if buf:
366 if buf:
367 yield ''.join(buf)
367 yield ''.join(buf)
368
368
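For instance, feeding increasingchunks a stream of 1 kB pieces yields chunks whose sizes roughly double until they reach the 64 kB ceiling:

    pieces = ('x' * 1024 for i in xrange(200))
    print [len(c) for c in increasingchunks(pieces)]
    # -> [1024, 2048, 4096, 8192, 16384, 32768, 65536, 65536, 9216]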
369 Abort = error.Abort
369 Abort = error.Abort
370
370
371 def always(fn):
371 def always(fn):
372 return True
372 return True
373
373
374 def never(fn):
374 def never(fn):
375 return False
375 return False
376
376
377 def pathto(root, n1, n2):
377 def pathto(root, n1, n2):
378 '''return the relative path from one place to another.
378 '''return the relative path from one place to another.
379 root should use os.sep to separate directories
379 root should use os.sep to separate directories
380 n1 should use os.sep to separate directories
380 n1 should use os.sep to separate directories
381 n2 should use "/" to separate directories
381 n2 should use "/" to separate directories
382 returns an os.sep-separated path.
382 returns an os.sep-separated path.
383
383
384 If n1 is a relative path, it's assumed it's
384 If n1 is a relative path, it's assumed it's
385 relative to root.
385 relative to root.
386 n2 should always be relative to root.
386 n2 should always be relative to root.
387 '''
387 '''
388 if not n1:
388 if not n1:
389 return localpath(n2)
389 return localpath(n2)
390 if os.path.isabs(n1):
390 if os.path.isabs(n1):
391 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
391 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
392 return os.path.join(root, localpath(n2))
392 return os.path.join(root, localpath(n2))
393 n2 = '/'.join((pconvert(root), n2))
393 n2 = '/'.join((pconvert(root), n2))
394 a, b = splitpath(n1), n2.split('/')
394 a, b = splitpath(n1), n2.split('/')
395 a.reverse()
395 a.reverse()
396 b.reverse()
396 b.reverse()
397 while a and b and a[-1] == b[-1]:
397 while a and b and a[-1] == b[-1]:
398 a.pop()
398 a.pop()
399 b.pop()
399 b.pop()
400 b.reverse()
400 b.reverse()
401 return os.sep.join((['..'] * len(a)) + b) or '.'
401 return os.sep.join((['..'] * len(a)) + b) or '.'
402
402
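Concretely, on a POSIX box (os.sep == '/') the relative-path computation behaves like this; '/repo', 'a/b' and 'a/c/d' are arbitrary illustrative paths:

    print pathto('/repo', 'a/b', 'a/c/d')    # -> '../c/d'
    print pathto('/repo', '', 'a/c/d')       # -> 'a/c/d' (empty n1: just localpath)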
403 _hgexecutable = None
403 _hgexecutable = None
404
404
405 def mainfrozen():
405 def mainfrozen():
406 """return True if we are a frozen executable.
406 """return True if we are a frozen executable.
407
407
408 The code supports py2exe (most common, Windows only) and tools/freeze
408 The code supports py2exe (most common, Windows only) and tools/freeze
409 (portable, not much used).
409 (portable, not much used).
410 """
410 """
411 return (safehasattr(sys, "frozen") or # new py2exe
411 return (safehasattr(sys, "frozen") or # new py2exe
412 safehasattr(sys, "importers") or # old py2exe
412 safehasattr(sys, "importers") or # old py2exe
413 imp.is_frozen("__main__")) # tools/freeze
413 imp.is_frozen("__main__")) # tools/freeze
414
414
415 def hgexecutable():
415 def hgexecutable():
416 """return location of the 'hg' executable.
416 """return location of the 'hg' executable.
417
417
418 Defaults to $HG or 'hg' in the search path.
418 Defaults to $HG or 'hg' in the search path.
419 """
419 """
420 if _hgexecutable is None:
420 if _hgexecutable is None:
421 hg = os.environ.get('HG')
421 hg = os.environ.get('HG')
422 mainmod = sys.modules['__main__']
422 mainmod = sys.modules['__main__']
423 if hg:
423 if hg:
424 _sethgexecutable(hg)
424 _sethgexecutable(hg)
425 elif mainfrozen():
425 elif mainfrozen():
426 _sethgexecutable(sys.executable)
426 _sethgexecutable(sys.executable)
427 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
427 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
428 _sethgexecutable(mainmod.__file__)
428 _sethgexecutable(mainmod.__file__)
429 else:
429 else:
430 exe = findexe('hg') or os.path.basename(sys.argv[0])
430 exe = findexe('hg') or os.path.basename(sys.argv[0])
431 _sethgexecutable(exe)
431 _sethgexecutable(exe)
432 return _hgexecutable
432 return _hgexecutable
433
433
434 def _sethgexecutable(path):
434 def _sethgexecutable(path):
435 """set location of the 'hg' executable"""
435 """set location of the 'hg' executable"""
436 global _hgexecutable
436 global _hgexecutable
437 _hgexecutable = path
437 _hgexecutable = path
438
438
439 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
439 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
440 '''enhanced shell command execution.
440 '''enhanced shell command execution.
441 run with environment maybe modified, maybe in different dir.
441 run with environment maybe modified, maybe in different dir.
442
442
443 if command fails and onerr is None, return status. if ui object,
443 if command fails and onerr is None, return status. if ui object,
444 print error message and return status, else raise onerr object as
444 print error message and return status, else raise onerr object as
445 exception.
445 exception.
446
446
447 if out is specified, it is assumed to be a file-like object that has a
447 if out is specified, it is assumed to be a file-like object that has a
448 write() method. stdout and stderr will be redirected to out.'''
448 write() method. stdout and stderr will be redirected to out.'''
449 try:
449 try:
450 sys.stdout.flush()
450 sys.stdout.flush()
451 except Exception:
451 except Exception:
452 pass
452 pass
453 def py2shell(val):
453 def py2shell(val):
454 'convert python object into string that is useful to shell'
454 'convert python object into string that is useful to shell'
455 if val is None or val is False:
455 if val is None or val is False:
456 return '0'
456 return '0'
457 if val is True:
457 if val is True:
458 return '1'
458 return '1'
459 return str(val)
459 return str(val)
460 origcmd = cmd
460 origcmd = cmd
461 cmd = quotecommand(cmd)
461 cmd = quotecommand(cmd)
462 if sys.platform == 'plan9':
462 if sys.platform == 'plan9':
463 # subprocess kludge to work around issues in half-baked Python
463 # subprocess kludge to work around issues in half-baked Python
464 # ports, notably bichued/python:
464 # ports, notably bichued/python:
465 if not cwd is None:
465 if not cwd is None:
466 os.chdir(cwd)
466 os.chdir(cwd)
467 rc = os.system(cmd)
467 rc = os.system(cmd)
468 else:
468 else:
469 env = dict(os.environ)
469 env = dict(os.environ)
470 env.update((k, py2shell(v)) for k, v in environ.iteritems())
470 env.update((k, py2shell(v)) for k, v in environ.iteritems())
471 env['HG'] = hgexecutable()
471 env['HG'] = hgexecutable()
472 if out is None or out == sys.__stdout__:
472 if out is None or out == sys.__stdout__:
473 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
473 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
474 env=env, cwd=cwd)
474 env=env, cwd=cwd)
475 else:
475 else:
476 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
476 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
477 env=env, cwd=cwd, stdout=subprocess.PIPE,
477 env=env, cwd=cwd, stdout=subprocess.PIPE,
478 stderr=subprocess.STDOUT)
478 stderr=subprocess.STDOUT)
479 for line in proc.stdout:
479 for line in proc.stdout:
480 out.write(line)
480 out.write(line)
481 proc.wait()
481 proc.wait()
482 rc = proc.returncode
482 rc = proc.returncode
483 if sys.platform == 'OpenVMS' and rc & 1:
483 if sys.platform == 'OpenVMS' and rc & 1:
484 rc = 0
484 rc = 0
485 if rc and onerr:
485 if rc and onerr:
486 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
486 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
487 explainexit(rc)[0])
487 explainexit(rc)[0])
488 if errprefix:
488 if errprefix:
489 errmsg = '%s: %s' % (errprefix, errmsg)
489 errmsg = '%s: %s' % (errprefix, errmsg)
490 try:
490 try:
491 onerr.warn(errmsg + '\n')
491 onerr.warn(errmsg + '\n')
492 except AttributeError:
492 except AttributeError:
493 raise onerr(errmsg)
493 raise onerr(errmsg)
494 return rc
494 return rc
495
495
496 def checksignature(func):
496 def checksignature(func):
497 '''wrap a function with code to check for calling errors'''
497 '''wrap a function with code to check for calling errors'''
498 def check(*args, **kwargs):
498 def check(*args, **kwargs):
499 try:
499 try:
500 return func(*args, **kwargs)
500 return func(*args, **kwargs)
501 except TypeError:
501 except TypeError:
502 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
502 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
503 raise error.SignatureError
503 raise error.SignatureError
504 raise
504 raise
505
505
506 return check
506 return check
507
507
508 def copyfile(src, dest):
508 def copyfile(src, dest):
509 "copy a file, preserving mode and atime/mtime"
509 "copy a file, preserving mode and atime/mtime"
510 if os.path.lexists(dest):
510 if os.path.lexists(dest):
511 unlink(dest)
511 unlink(dest)
512 if os.path.islink(src):
512 if os.path.islink(src):
513 os.symlink(os.readlink(src), dest)
513 os.symlink(os.readlink(src), dest)
514 else:
514 else:
515 try:
515 try:
516 shutil.copyfile(src, dest)
516 shutil.copyfile(src, dest)
517 shutil.copymode(src, dest)
517 shutil.copymode(src, dest)
518 except shutil.Error, inst:
518 except shutil.Error, inst:
519 raise Abort(str(inst))
519 raise Abort(str(inst))
520
520
521 def copyfiles(src, dst, hardlink=None):
521 def copyfiles(src, dst, hardlink=None):
522 """Copy a directory tree using hardlinks if possible"""
522 """Copy a directory tree using hardlinks if possible"""
523
523
524 if hardlink is None:
524 if hardlink is None:
525 hardlink = (os.stat(src).st_dev ==
525 hardlink = (os.stat(src).st_dev ==
526 os.stat(os.path.dirname(dst)).st_dev)
526 os.stat(os.path.dirname(dst)).st_dev)
527
527
528 num = 0
528 num = 0
529 if os.path.isdir(src):
529 if os.path.isdir(src):
530 os.mkdir(dst)
530 os.mkdir(dst)
531 for name, kind in osutil.listdir(src):
531 for name, kind in osutil.listdir(src):
532 srcname = os.path.join(src, name)
532 srcname = os.path.join(src, name)
533 dstname = os.path.join(dst, name)
533 dstname = os.path.join(dst, name)
534 hardlink, n = copyfiles(srcname, dstname, hardlink)
534 hardlink, n = copyfiles(srcname, dstname, hardlink)
535 num += n
535 num += n
536 else:
536 else:
537 if hardlink:
537 if hardlink:
538 try:
538 try:
539 oslink(src, dst)
539 oslink(src, dst)
540 except (IOError, OSError):
540 except (IOError, OSError):
541 hardlink = False
541 hardlink = False
542 shutil.copy(src, dst)
542 shutil.copy(src, dst)
543 else:
543 else:
544 shutil.copy(src, dst)
544 shutil.copy(src, dst)
545 num += 1
545 num += 1
546
546
547 return hardlink, num
547 return hardlink, num
548
548
549 _winreservednames = '''con prn aux nul
549 _winreservednames = '''con prn aux nul
550 com1 com2 com3 com4 com5 com6 com7 com8 com9
550 com1 com2 com3 com4 com5 com6 com7 com8 com9
551 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
551 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
552 _winreservedchars = ':*?"<>|'
552 _winreservedchars = ':*?"<>|'
553 def checkwinfilename(path):
553 def checkwinfilename(path):
554 '''Check that the base-relative path is a valid filename on Windows.
554 '''Check that the base-relative path is a valid filename on Windows.
555 Returns None if the path is ok, or a UI string describing the problem.
555 Returns None if the path is ok, or a UI string describing the problem.
556
556
557 >>> checkwinfilename("just/a/normal/path")
557 >>> checkwinfilename("just/a/normal/path")
558 >>> checkwinfilename("foo/bar/con.xml")
558 >>> checkwinfilename("foo/bar/con.xml")
559 "filename contains 'con', which is reserved on Windows"
559 "filename contains 'con', which is reserved on Windows"
560 >>> checkwinfilename("foo/con.xml/bar")
560 >>> checkwinfilename("foo/con.xml/bar")
561 "filename contains 'con', which is reserved on Windows"
561 "filename contains 'con', which is reserved on Windows"
562 >>> checkwinfilename("foo/bar/xml.con")
562 >>> checkwinfilename("foo/bar/xml.con")
563 >>> checkwinfilename("foo/bar/AUX/bla.txt")
563 >>> checkwinfilename("foo/bar/AUX/bla.txt")
564 "filename contains 'AUX', which is reserved on Windows"
564 "filename contains 'AUX', which is reserved on Windows"
565 >>> checkwinfilename("foo/bar/bla:.txt")
565 >>> checkwinfilename("foo/bar/bla:.txt")
566 "filename contains ':', which is reserved on Windows"
566 "filename contains ':', which is reserved on Windows"
567 >>> checkwinfilename("foo/bar/b\07la.txt")
567 >>> checkwinfilename("foo/bar/b\07la.txt")
568 "filename contains '\\\\x07', which is invalid on Windows"
568 "filename contains '\\\\x07', which is invalid on Windows"
569 >>> checkwinfilename("foo/bar/bla ")
569 >>> checkwinfilename("foo/bar/bla ")
570 "filename ends with ' ', which is not allowed on Windows"
570 "filename ends with ' ', which is not allowed on Windows"
571 >>> checkwinfilename("../bar")
571 >>> checkwinfilename("../bar")
572 '''
572 '''
573 for n in path.replace('\\', '/').split('/'):
573 for n in path.replace('\\', '/').split('/'):
574 if not n:
574 if not n:
575 continue
575 continue
576 for c in n:
576 for c in n:
577 if c in _winreservedchars:
577 if c in _winreservedchars:
578 return _("filename contains '%s', which is reserved "
578 return _("filename contains '%s', which is reserved "
579 "on Windows") % c
579 "on Windows") % c
580 if ord(c) <= 31:
580 if ord(c) <= 31:
581 return _("filename contains %r, which is invalid "
581 return _("filename contains %r, which is invalid "
582 "on Windows") % c
582 "on Windows") % c
583 base = n.split('.')[0]
583 base = n.split('.')[0]
584 if base and base.lower() in _winreservednames:
584 if base and base.lower() in _winreservednames:
585 return _("filename contains '%s', which is reserved "
585 return _("filename contains '%s', which is reserved "
586 "on Windows") % base
586 "on Windows") % base
587 t = n[-1]
587 t = n[-1]
588 if t in '. ' and n not in '..':
588 if t in '. ' and n not in '..':
589 return _("filename ends with '%s', which is not allowed "
589 return _("filename ends with '%s', which is not allowed "
590 "on Windows") % t
590 "on Windows") % t
591
591
592 if os.name == 'nt':
592 if os.name == 'nt':
593 checkosfilename = checkwinfilename
593 checkosfilename = checkwinfilename
594 else:
594 else:
595 checkosfilename = platform.checkosfilename
595 checkosfilename = platform.checkosfilename
596
596
597 def makelock(info, pathname):
597 def makelock(info, pathname):
598 try:
598 try:
599 return os.symlink(info, pathname)
599 return os.symlink(info, pathname)
600 except OSError, why:
600 except OSError, why:
601 if why.errno == errno.EEXIST:
601 if why.errno == errno.EEXIST:
602 raise
602 raise
603 except AttributeError: # no symlink in os
603 except AttributeError: # no symlink in os
604 pass
604 pass
605
605
606 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
606 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
607 os.write(ld, info)
607 os.write(ld, info)
608 os.close(ld)
608 os.close(ld)
609
609
610 def readlock(pathname):
610 def readlock(pathname):
611 try:
611 try:
612 return os.readlink(pathname)
612 return os.readlink(pathname)
613 except OSError, why:
613 except OSError, why:
614 if why.errno not in (errno.EINVAL, errno.ENOSYS):
614 if why.errno not in (errno.EINVAL, errno.ENOSYS):
615 raise
615 raise
616 except AttributeError: # no symlink in os
616 except AttributeError: # no symlink in os
617 pass
617 pass
618 fp = posixfile(pathname)
618 fp = posixfile(pathname)
619 r = fp.read()
619 r = fp.read()
620 fp.close()
620 fp.close()
621 return r
621 return r
622
622
623 def fstat(fp):
623 def fstat(fp):
624 '''stat file object that may not have fileno method.'''
624 '''stat file object that may not have fileno method.'''
625 try:
625 try:
626 return os.fstat(fp.fileno())
626 return os.fstat(fp.fileno())
627 except AttributeError:
627 except AttributeError:
628 return os.stat(fp.name)
628 return os.stat(fp.name)
629
629
630 # File system features
630 # File system features
631
631
632 def checkcase(path):
632 def checkcase(path):
633 """
633 """
634 Check whether the given path is on a case-sensitive filesystem
634 Check whether the given path is on a case-sensitive filesystem
635
635
636 Requires a path (like /foo/.hg) ending with a foldable final
636 Requires a path (like /foo/.hg) ending with a foldable final
637 directory component.
637 directory component.
638 """
638 """
639 s1 = os.stat(path)
639 s1 = os.stat(path)
640 d, b = os.path.split(path)
640 d, b = os.path.split(path)
641 b2 = b.upper()
641 b2 = b.upper()
642 if b == b2:
642 if b == b2:
643 b2 = b.lower()
643 b2 = b.lower()
644 if b == b2:
644 if b == b2:
645 return True # no evidence against case sensitivity
645 return True # no evidence against case sensitivity
646 p2 = os.path.join(d, b2)
646 p2 = os.path.join(d, b2)
647 try:
647 try:
648 s2 = os.stat(p2)
648 s2 = os.stat(p2)
649 if s2 == s1:
649 if s2 == s1:
650 return False
650 return False
651 return True
651 return True
652 except OSError:
652 except OSError:
653 return True
653 return True
654
654
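A typical call site hands checkcase a path whose last component folds, for example a repository's '.hg' directory (the path below is only an example, and the result depends on the filesystem):

    print checkcase('/path/to/repo/.hg')   # True on ext4, usually False on HFS+/NTFS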
655 try:
655 try:
656 import re2
656 import re2
657 _re2 = None
657 _re2 = None
658 except ImportError:
658 except ImportError:
659 _re2 = False
659 _re2 = False
660
660
661 def compilere(pat):
661 def compilere(pat):
662 '''Compile a regular expression, using re2 if possible
662 '''Compile a regular expression, using re2 if possible
663
663
664 For best performance, use only re2-compatible regexp features.'''
664 For best performance, use only re2-compatible regexp features.'''
665 global _re2
665 global _re2
666 if _re2 is None:
666 if _re2 is None:
667 try:
667 try:
668 re2.compile
668 re2.compile
669 _re2 = True
669 _re2 = True
670 except ImportError:
670 except ImportError:
671 _re2 = False
671 _re2 = False
672 if _re2:
672 if _re2:
673 try:
673 try:
674 return re2.compile(pat)
674 return re2.compile(pat)
675 except re2.error:
675 except re2.error:
676 pass
676 pass
677 return re.compile(pat)
677 return re.compile(pat)
678
678
679 _fspathcache = {}
679 _fspathcache = {}
680 def fspath(name, root):
680 def fspath(name, root):
681 '''Get name in the case stored in the filesystem
681 '''Get name in the case stored in the filesystem
682
682
683 The name should be relative to root, and be normcase-ed for efficiency.
683 The name should be relative to root, and be normcase-ed for efficiency.
684
684
685 Note that this function is unnecessary, and should not be
685 Note that this function is unnecessary, and should not be
686 called, for case-sensitive filesystems (simply because it's expensive).
686 called, for case-sensitive filesystems (simply because it's expensive).
687
687
688 The root should be normcase-ed, too.
688 The root should be normcase-ed, too.
689 '''
689 '''
690 def find(p, contents):
690 def find(p, contents):
691 for n in contents:
691 for n in contents:
692 if normcase(n) == p:
692 if normcase(n) == p:
693 return n
693 return n
694 return None
694 return None
695
695
696 seps = os.sep
696 seps = os.sep
697 if os.altsep:
697 if os.altsep:
698 seps = seps + os.altsep
698 seps = seps + os.altsep
699 # Protect backslashes. This gets silly very quickly.
699 # Protect backslashes. This gets silly very quickly.
700 seps.replace('\\','\\\\')
700 seps.replace('\\','\\\\')
701 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
701 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
702 dir = os.path.normpath(root)
702 dir = os.path.normpath(root)
703 result = []
703 result = []
704 for part, sep in pattern.findall(name):
704 for part, sep in pattern.findall(name):
705 if sep:
705 if sep:
706 result.append(sep)
706 result.append(sep)
707 continue
707 continue
708
708
709 if dir not in _fspathcache:
709 if dir not in _fspathcache:
710 _fspathcache[dir] = os.listdir(dir)
710 _fspathcache[dir] = os.listdir(dir)
711 contents = _fspathcache[dir]
711 contents = _fspathcache[dir]
712
712
713 found = find(part, contents)
713 found = find(part, contents)
714 if not found:
714 if not found:
715 # retry "once per directory" per "dirstate.walk" which
715 # retry "once per directory" per "dirstate.walk" which
716 # may take place for each patch of "hg qpush", for example
716 # may take place for each patch of "hg qpush", for example
717 contents = os.listdir(dir)
717 contents = os.listdir(dir)
718 _fspathcache[dir] = contents
718 _fspathcache[dir] = contents
719 found = find(part, contents)
719 found = find(part, contents)
720
720
721 result.append(found or part)
721 result.append(found or part)
722 dir = os.path.join(dir, part)
722 dir = os.path.join(dir, part)
723
723
724 return ''.join(result)
724 return ''.join(result)
725
725
726 def checknlink(testfile):
726 def checknlink(testfile):
727 '''check whether hardlink count reporting works properly'''
727 '''check whether hardlink count reporting works properly'''
728
728
729 # testfile may be open, so we need a separate file for checking to
729 # testfile may be open, so we need a separate file for checking to
730 # work around issue2543 (or testfile may get lost on Samba shares)
730 # work around issue2543 (or testfile may get lost on Samba shares)
731 f1 = testfile + ".hgtmp1"
731 f1 = testfile + ".hgtmp1"
732 if os.path.lexists(f1):
732 if os.path.lexists(f1):
733 return False
733 return False
734 try:
734 try:
735 posixfile(f1, 'w').close()
735 posixfile(f1, 'w').close()
736 except IOError:
736 except IOError:
737 return False
737 return False
738
738
739 f2 = testfile + ".hgtmp2"
739 f2 = testfile + ".hgtmp2"
740 fd = None
740 fd = None
741 try:
741 try:
742 try:
742 try:
743 oslink(f1, f2)
743 oslink(f1, f2)
744 except OSError:
744 except OSError:
745 return False
745 return False
746
746
747 # nlinks() may behave differently for files on Windows shares if
747 # nlinks() may behave differently for files on Windows shares if
748 # the file is open.
748 # the file is open.
749 fd = posixfile(f2)
749 fd = posixfile(f2)
750 return nlinks(f2) > 1
750 return nlinks(f2) > 1
751 finally:
751 finally:
752 if fd is not None:
752 if fd is not None:
753 fd.close()
753 fd.close()
754 for f in (f1, f2):
754 for f in (f1, f2):
755 try:
755 try:
756 os.unlink(f)
756 os.unlink(f)
757 except OSError:
757 except OSError:
758 pass
758 pass
759
759
760 return False
760 return False
761
761
762 def endswithsep(path):
762 def endswithsep(path):
763 '''Check path ends with os.sep or os.altsep.'''
763 '''Check path ends with os.sep or os.altsep.'''
764 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
764 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
765
765
766 def splitpath(path):
766 def splitpath(path):
767 '''Split path by os.sep.
767 '''Split path by os.sep.
768 Note that this function does not use os.altsep because this is
768 Note that this function does not use os.altsep because this is
769 an alternative to a simple "xxx.split(os.sep)".
769 an alternative to a simple "xxx.split(os.sep)".
770 It is recommended to use os.path.normpath() before using this
770 It is recommended to use os.path.normpath() before using this
771 function if needed.'''
771 function if needed.'''
772 return path.split(os.sep)
772 return path.split(os.sep)
773
773
774 def gui():
774 def gui():
775 '''Are we running in a GUI?'''
775 '''Are we running in a GUI?'''
776 if sys.platform == 'darwin':
776 if sys.platform == 'darwin':
777 if 'SSH_CONNECTION' in os.environ:
777 if 'SSH_CONNECTION' in os.environ:
778 # handle SSH access to a box where the user is logged in
778 # handle SSH access to a box where the user is logged in
779 return False
779 return False
780 elif getattr(osutil, 'isgui', None):
780 elif getattr(osutil, 'isgui', None):
781 # check if a CoreGraphics session is available
781 # check if a CoreGraphics session is available
782 return osutil.isgui()
782 return osutil.isgui()
783 else:
783 else:
784 # pure build; use a safe default
784 # pure build; use a safe default
785 return True
785 return True
786 else:
786 else:
787 return os.name == "nt" or os.environ.get("DISPLAY")
787 return os.name == "nt" or os.environ.get("DISPLAY")
788
788
789 def mktempcopy(name, emptyok=False, createmode=None):
789 def mktempcopy(name, emptyok=False, createmode=None):
790 """Create a temporary file with the same contents from name
790 """Create a temporary file with the same contents from name
791
791
792 The permission bits are copied from the original file.
792 The permission bits are copied from the original file.
793
793
794 If the temporary file is going to be truncated immediately, you
794 If the temporary file is going to be truncated immediately, you
795 can use emptyok=True as an optimization.
795 can use emptyok=True as an optimization.
796
796
797 Returns the name of the temporary file.
797 Returns the name of the temporary file.
798 """
798 """
799 d, fn = os.path.split(name)
799 d, fn = os.path.split(name)
800 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
800 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
801 os.close(fd)
801 os.close(fd)
802 # Temporary files are created with mode 0600, which is usually not
802 # Temporary files are created with mode 0600, which is usually not
803 # what we want. If the original file already exists, just copy
803 # what we want. If the original file already exists, just copy
804 # its mode. Otherwise, manually obey umask.
804 # its mode. Otherwise, manually obey umask.
805 copymode(name, temp, createmode)
805 copymode(name, temp, createmode)
806 if emptyok:
806 if emptyok:
807 return temp
807 return temp
808 try:
808 try:
809 try:
809 try:
810 ifp = posixfile(name, "rb")
810 ifp = posixfile(name, "rb")
811 except IOError, inst:
811 except IOError, inst:
812 if inst.errno == errno.ENOENT:
812 if inst.errno == errno.ENOENT:
813 return temp
813 return temp
814 if not getattr(inst, 'filename', None):
814 if not getattr(inst, 'filename', None):
815 inst.filename = name
815 inst.filename = name
816 raise
816 raise
817 ofp = posixfile(temp, "wb")
817 ofp = posixfile(temp, "wb")
818 for chunk in filechunkiter(ifp):
818 for chunk in filechunkiter(ifp):
819 ofp.write(chunk)
819 ofp.write(chunk)
820 ifp.close()
820 ifp.close()
821 ofp.close()
821 ofp.close()
822 except: # re-raises
822 except: # re-raises
823 try: os.unlink(temp)
823 try: os.unlink(temp)
824 except OSError: pass
824 except OSError: pass
825 raise
825 raise
826 return temp
826 return temp
827
827
828 class atomictempfile(object):
828 class atomictempfile(object):
829 '''writable file object that atomically updates a file
829 '''writable file object that atomically updates a file
830
830
831 All writes will go to a temporary copy of the original file. Call
831 All writes will go to a temporary copy of the original file. Call
832 close() when you are done writing, and atomictempfile will rename
832 close() when you are done writing, and atomictempfile will rename
833 the temporary copy to the original name, making the changes
833 the temporary copy to the original name, making the changes
834 visible. If the object is destroyed without being closed, all your
834 visible. If the object is destroyed without being closed, all your
835 writes are discarded.
835 writes are discarded.
836 '''
836 '''
837 def __init__(self, name, mode='w+b', createmode=None):
837 def __init__(self, name, mode='w+b', createmode=None):
838 self.__name = name # permanent name
838 self.__name = name # permanent name
839 self._tempname = mktempcopy(name, emptyok=('w' in mode),
839 self._tempname = mktempcopy(name, emptyok=('w' in mode),
840 createmode=createmode)
840 createmode=createmode)
841 self._fp = posixfile(self._tempname, mode)
841 self._fp = posixfile(self._tempname, mode)
842
842
843 # delegated methods
843 # delegated methods
844 self.write = self._fp.write
844 self.write = self._fp.write
845 self.seek = self._fp.seek
845 self.seek = self._fp.seek
846 self.tell = self._fp.tell
846 self.tell = self._fp.tell
847 self.fileno = self._fp.fileno
847 self.fileno = self._fp.fileno
848
848
849 def close(self):
849 def close(self):
850 if not self._fp.closed:
850 if not self._fp.closed:
851 self._fp.close()
851 self._fp.close()
852 rename(self._tempname, localpath(self.__name))
852 rename(self._tempname, localpath(self.__name))
853
853
854 def discard(self):
854 def discard(self):
855 if not self._fp.closed:
855 if not self._fp.closed:
856 try:
856 try:
857 os.unlink(self._tempname)
857 os.unlink(self._tempname)
858 except OSError:
858 except OSError:
859 pass
859 pass
860 self._fp.close()
860 self._fp.close()
861
861
862 def __del__(self):
862 def __del__(self):
863 if safehasattr(self, '_fp'): # constructor actually did something
863 if safehasattr(self, '_fp'): # constructor actually did something
864 self.discard()
864 self.discard()
865
865
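A short usage sketch ('important.cfg' is a made-up file name): nothing is visible at the destination until close() performs the rename, and discard() throws the temporary copy away.

    path = os.path.join(os.getcwd(), 'important.cfg')
    f = atomictempfile(path)
    f.write('key = value\n')
    f.close()                    # the rename publishes the new contents atomically

    f = atomictempfile(path)
    f.write('half-finished data')
    f.discard()                  # the original file stays untouched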
866 def makedirs(name, mode=None):
866 def makedirs(name, mode=None):
867 """recursive directory creation with parent mode inheritance"""
867 """recursive directory creation with parent mode inheritance"""
868 try:
868 try:
869 os.mkdir(name)
869 os.mkdir(name)
870 except OSError, err:
870 except OSError, err:
871 if err.errno == errno.EEXIST:
871 if err.errno == errno.EEXIST:
872 return
872 return
873 if err.errno != errno.ENOENT or not name:
873 if err.errno != errno.ENOENT or not name:
874 raise
874 raise
875 parent = os.path.dirname(os.path.abspath(name))
875 parent = os.path.dirname(os.path.abspath(name))
876 if parent == name:
876 if parent == name:
877 raise
877 raise
878 makedirs(parent, mode)
878 makedirs(parent, mode)
879 os.mkdir(name)
879 os.mkdir(name)
880 if mode is not None:
880 if mode is not None:
881 os.chmod(name, mode)
881 os.chmod(name, mode)
882
882
883 def ensuredirs(name, mode=None):
884 """race-safe recursive directory creation"""
885 try:
886 makedirs(name, mode)
887 except OSError, err:
888 if err.errno == errno.EEXIST and os.path.isdir(name):
889 # someone else seems to have won a directory creation race
890 return
891 raise
892
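ensuredirs is the race-safe entry point this changeset introduces: makedirs() above tolerates EEXIST only on its first os.mkdir() call, so if another process creates the leaf directory between that failed attempt and the retry made after the parents are built, the error escapes to the caller. The sketch below shows the intended call pattern; the nested path is hypothetical.

    target = os.path.join('build', 'pkg', 'subdir')
    ensuredirs(target)     # creates build/, build/pkg/ and build/pkg/subdir/ as needed
    ensuredirs(target)     # a second (or concurrent) call is a silent no-op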
883 def readfile(path):
893 def readfile(path):
884 fp = open(path, 'rb')
894 fp = open(path, 'rb')
885 try:
895 try:
886 return fp.read()
896 return fp.read()
887 finally:
897 finally:
888 fp.close()
898 fp.close()
889
899
890 def writefile(path, text):
900 def writefile(path, text):
891 fp = open(path, 'wb')
901 fp = open(path, 'wb')
892 try:
902 try:
893 fp.write(text)
903 fp.write(text)
894 finally:
904 finally:
895 fp.close()
905 fp.close()
896
906
897 def appendfile(path, text):
907 def appendfile(path, text):
898 fp = open(path, 'ab')
908 fp = open(path, 'ab')
899 try:
909 try:
900 fp.write(text)
910 fp.write(text)
901 finally:
911 finally:
902 fp.close()
912 fp.close()
903
913
904 class chunkbuffer(object):
914 class chunkbuffer(object):
905 """Allow arbitrary sized chunks of data to be efficiently read from an
915 """Allow arbitrary sized chunks of data to be efficiently read from an
906 iterator over chunks of arbitrary size."""
916 iterator over chunks of arbitrary size."""
907
917
908 def __init__(self, in_iter):
918 def __init__(self, in_iter):
909 """in_iter is the iterator that's iterating over the input chunks.
919 """in_iter is the iterator that's iterating over the input chunks.
910 Chunks larger than 1 MB are split into 256 kB pieces up front."""
910 Chunks larger than 1 MB are split into 256 kB pieces up front."""
911 def splitbig(chunks):
921 def splitbig(chunks):
912 for chunk in chunks:
922 for chunk in chunks:
913 if len(chunk) > 2**20:
923 if len(chunk) > 2**20:
914 pos = 0
924 pos = 0
915 while pos < len(chunk):
925 while pos < len(chunk):
916 end = pos + 2 ** 18
926 end = pos + 2 ** 18
917 yield chunk[pos:end]
927 yield chunk[pos:end]
918 pos = end
928 pos = end
919 else:
929 else:
920 yield chunk
930 yield chunk
921 self.iter = splitbig(in_iter)
931 self.iter = splitbig(in_iter)
922 self._queue = deque()
932 self._queue = deque()
923
933
924 def read(self, l):
934 def read(self, l):
925 """Read L bytes of data from the iterator of chunks of data.
935 """Read L bytes of data from the iterator of chunks of data.
926 Returns less than L bytes if the iterator runs dry."""
936 Returns less than L bytes if the iterator runs dry."""
927 left = l
937 left = l
928 buf = []
938 buf = []
929 queue = self._queue
939 queue = self._queue
930 while left > 0:
940 while left > 0:
931 # refill the queue
941 # refill the queue
932 if not queue:
942 if not queue:
933 target = 2**18
943 target = 2**18
934 for chunk in self.iter:
944 for chunk in self.iter:
935 queue.append(chunk)
945 queue.append(chunk)
936 target -= len(chunk)
946 target -= len(chunk)
937 if target <= 0:
947 if target <= 0:
938 break
948 break
939 if not queue:
949 if not queue:
940 break
950 break
941
951
942 chunk = queue.popleft()
952 chunk = queue.popleft()
943 left -= len(chunk)
953 left -= len(chunk)
944 if left < 0:
954 if left < 0:
945 queue.appendleft(chunk[left:])
955 queue.appendleft(chunk[left:])
946 buf.append(chunk[:left])
956 buf.append(chunk[:left])
947 else:
957 else:
948 buf.append(chunk)
958 buf.append(chunk)
949
959
950 return ''.join(buf)
960 return ''.join(buf)
951
961
952 def filechunkiter(f, size=65536, limit=None):
962 def filechunkiter(f, size=65536, limit=None):
953 """Create a generator that produces the data in the file size
963 """Create a generator that produces the data in the file size
954 (default 65536) bytes at a time, up to optional limit (default is
964 (default 65536) bytes at a time, up to optional limit (default is
955 to read all data). Chunks may be less than size bytes if the
965 to read all data). Chunks may be less than size bytes if the
956 chunk is the last chunk in the file, or the file is a socket or
966 chunk is the last chunk in the file, or the file is a socket or
957 some other type of file that sometimes reads less data than is
967 some other type of file that sometimes reads less data than is
958 requested."""
968 requested."""
959 assert size >= 0
969 assert size >= 0
960 assert limit is None or limit >= 0
970 assert limit is None or limit >= 0
961 while True:
971 while True:
962 if limit is None:
972 if limit is None:
963 nbytes = size
973 nbytes = size
964 else:
974 else:
965 nbytes = min(limit, size)
975 nbytes = min(limit, size)
966 s = nbytes and f.read(nbytes)
976 s = nbytes and f.read(nbytes)
967 if not s:
977 if not s:
968 break
978 break
969 if limit:
979 if limit:
970 limit -= len(s)
980 limit -= len(s)
971 yield s
981 yield s
972
982
973 def makedate():
983 def makedate():
974 ct = time.time()
984 ct = time.time()
975 if ct < 0:
985 if ct < 0:
976 hint = _("check your clock")
986 hint = _("check your clock")
977 raise Abort(_("negative timestamp: %d") % ct, hint=hint)
987 raise Abort(_("negative timestamp: %d") % ct, hint=hint)
978 delta = (datetime.datetime.utcfromtimestamp(ct) -
988 delta = (datetime.datetime.utcfromtimestamp(ct) -
979 datetime.datetime.fromtimestamp(ct))
989 datetime.datetime.fromtimestamp(ct))
980 tz = delta.days * 86400 + delta.seconds
990 tz = delta.days * 86400 + delta.seconds
981 return ct, tz
991 return ct, tz
982
992
983 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
993 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
984 """represent a (unixtime, offset) tuple as a localized time.
994 """represent a (unixtime, offset) tuple as a localized time.
985 unixtime is seconds since the epoch, and offset is the time zone's
995 unixtime is seconds since the epoch, and offset is the time zone's
986 number of seconds away from UTC. the %1 and %2 format codes expand
996 number of seconds away from UTC. the %1 and %2 format codes expand
987 to the offset's hours and minutes; omit them to leave the zone off."""
997 to the offset's hours and minutes; omit them to leave the zone off."""
988 t, tz = date or makedate()
998 t, tz = date or makedate()
989 if t < 0:
999 if t < 0:
990 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
1000 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
991 tz = 0
1001 tz = 0
992 if "%1" in format or "%2" in format:
1002 if "%1" in format or "%2" in format:
993 sign = (tz > 0) and "-" or "+"
1003 sign = (tz > 0) and "-" or "+"
994 minutes = abs(tz) // 60
1004 minutes = abs(tz) // 60
995 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
1005 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
996 format = format.replace("%2", "%02d" % (minutes % 60))
1006 format = format.replace("%2", "%02d" % (minutes % 60))
997 try:
1007 try:
998 t = time.gmtime(float(t) - tz)
1008 t = time.gmtime(float(t) - tz)
999 except ValueError:
1009 except ValueError:
1000 # time was out of range
1010 # time was out of range
1001 t = time.gmtime(sys.maxint)
1011 t = time.gmtime(sys.maxint)
1002 s = time.strftime(format, t)
1012 s = time.strftime(format, t)
1003 return s
1013 return s
1004
1014
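# Editorial sketch (not part of the original module): %1 and %2 expand to
# the offset's hours and minutes. Note the sign convention: an offset of
# -7200 seconds denotes a zone two hours ahead of UTC (+0200), so
# 12:00 UTC renders as 14:00 local time.
#
# >>> datestr((1357387200, -7200), format='%Y-%m-%d %H:%M %1%2')
# '2013-01-05 14:00 +0200'
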
1005 def shortdate(date=None):
1015 def shortdate(date=None):
1006 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1016 """turn (timestamp, tzoff) tuple into iso 8631 date."""
1007 return datestr(date, format='%Y-%m-%d')
1017 return datestr(date, format='%Y-%m-%d')
1008
1018
1009 def strdate(string, format, defaults=[]):
1019 def strdate(string, format, defaults=[]):
1010 """parse a localized time string and return a (unixtime, offset) tuple.
1020 """parse a localized time string and return a (unixtime, offset) tuple.
1011 if the string cannot be parsed, ValueError is raised."""
1021 if the string cannot be parsed, ValueError is raised."""
1012 def timezone(string):
1022 def timezone(string):
1013 tz = string.split()[-1]
1023 tz = string.split()[-1]
1014 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1024 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
1015 sign = (tz[0] == "+") and 1 or -1
1025 sign = (tz[0] == "+") and 1 or -1
1016 hours = int(tz[1:3])
1026 hours = int(tz[1:3])
1017 minutes = int(tz[3:5])
1027 minutes = int(tz[3:5])
1018 return -sign * (hours * 60 + minutes) * 60
1028 return -sign * (hours * 60 + minutes) * 60
1019 if tz == "GMT" or tz == "UTC":
1029 if tz == "GMT" or tz == "UTC":
1020 return 0
1030 return 0
1021 return None
1031 return None
1022
1032
1023 # NOTE: unixtime = localunixtime + offset
1033 # NOTE: unixtime = localunixtime + offset
1024 offset, date = timezone(string), string
1034 offset, date = timezone(string), string
1025 if offset is not None:
1035 if offset is not None:
1026 date = " ".join(string.split()[:-1])
1036 date = " ".join(string.split()[:-1])
1027
1037
1028 # add missing elements from defaults
1038 # add missing elements from defaults
1029 usenow = False # default to using biased defaults
1039 usenow = False # default to using biased defaults
1030 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1040 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
1031 found = [True for p in part if ("%"+p) in format]
1041 found = [True for p in part if ("%"+p) in format]
1032 if not found:
1042 if not found:
1033 date += "@" + defaults[part][usenow]
1043 date += "@" + defaults[part][usenow]
1034 format += "@%" + part[0]
1044 format += "@%" + part[0]
1035 else:
1045 else:
1036 # We've found a specific time element, less specific time
1046 # We've found a specific time element, less specific time
1037 # elements are relative to today
1047 # elements are relative to today
1038 usenow = True
1048 usenow = True
1039
1049
1040 timetuple = time.strptime(date, format)
1050 timetuple = time.strptime(date, format)
1041 localunixtime = int(calendar.timegm(timetuple))
1051 localunixtime = int(calendar.timegm(timetuple))
1042 if offset is None:
1052 if offset is None:
1043 # local timezone
1053 # local timezone
1044 unixtime = int(time.mktime(timetuple))
1054 unixtime = int(time.mktime(timetuple))
1045 offset = unixtime - localunixtime
1055 offset = unixtime - localunixtime
1046 else:
1056 else:
1047 unixtime = localunixtime + offset
1057 unixtime = localunixtime + offset
1048 return unixtime, offset
1058 return unixtime, offset
1049
1059
1050 def parsedate(date, formats=None, bias={}):
1060 def parsedate(date, formats=None, bias={}):
1051 """parse a localized date/time and return a (unixtime, offset) tuple.
1061 """parse a localized date/time and return a (unixtime, offset) tuple.
1052
1062
1053 The date may be a "unixtime offset" string or in one of the specified
1063 The date may be a "unixtime offset" string or in one of the specified
1054 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1064 formats. If the date already is a (unixtime, offset) tuple, it is returned.
1055
1065
1056 >>> parsedate(' today ') == parsedate(\
1066 >>> parsedate(' today ') == parsedate(\
1057 datetime.date.today().strftime('%b %d'))
1067 datetime.date.today().strftime('%b %d'))
1058 True
1068 True
1059 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1069 >>> parsedate( 'yesterday ') == parsedate((datetime.date.today() -\
1060 datetime.timedelta(days=1)\
1070 datetime.timedelta(days=1)\
1061 ).strftime('%b %d'))
1071 ).strftime('%b %d'))
1062 True
1072 True
1063 >>> now, tz = makedate()
1073 >>> now, tz = makedate()
1064 >>> strnow, strtz = parsedate('now')
1074 >>> strnow, strtz = parsedate('now')
1065 >>> (strnow - now) < 1
1075 >>> (strnow - now) < 1
1066 True
1076 True
1067 >>> tz == strtz
1077 >>> tz == strtz
1068 True
1078 True
1069 """
1079 """
1070 if not date:
1080 if not date:
1071 return 0, 0
1081 return 0, 0
1072 if isinstance(date, tuple) and len(date) == 2:
1082 if isinstance(date, tuple) and len(date) == 2:
1073 return date
1083 return date
1074 if not formats:
1084 if not formats:
1075 formats = defaultdateformats
1085 formats = defaultdateformats
1076 date = date.strip()
1086 date = date.strip()
1077
1087
1078 if date == _('now'):
1088 if date == _('now'):
1079 return makedate()
1089 return makedate()
1080 if date == _('today'):
1090 if date == _('today'):
1081 date = datetime.date.today().strftime('%b %d')
1091 date = datetime.date.today().strftime('%b %d')
1082 elif date == _('yesterday'):
1092 elif date == _('yesterday'):
1083 date = (datetime.date.today() -
1093 date = (datetime.date.today() -
1084 datetime.timedelta(days=1)).strftime('%b %d')
1094 datetime.timedelta(days=1)).strftime('%b %d')
1085
1095
1086 try:
1096 try:
1087 when, offset = map(int, date.split(' '))
1097 when, offset = map(int, date.split(' '))
1088 except ValueError:
1098 except ValueError:
1089 # fill out defaults
1099 # fill out defaults
1090 now = makedate()
1100 now = makedate()
1091 defaults = {}
1101 defaults = {}
1092 for part in ("d", "mb", "yY", "HI", "M", "S"):
1102 for part in ("d", "mb", "yY", "HI", "M", "S"):
1093 # this piece is for rounding the specific end of unknowns
1103 # this piece is for rounding the specific end of unknowns
1094 b = bias.get(part)
1104 b = bias.get(part)
1095 if b is None:
1105 if b is None:
1096 if part[0] in "HMS":
1106 if part[0] in "HMS":
1097 b = "00"
1107 b = "00"
1098 else:
1108 else:
1099 b = "0"
1109 b = "0"
1100
1110
1101 # this piece is for matching the generic end to today's date
1111 # this piece is for matching the generic end to today's date
1102 n = datestr(now, "%" + part[0])
1112 n = datestr(now, "%" + part[0])
1103
1113
1104 defaults[part] = (b, n)
1114 defaults[part] = (b, n)
1105
1115
1106 for format in formats:
1116 for format in formats:
1107 try:
1117 try:
1108 when, offset = strdate(date, format, defaults)
1118 when, offset = strdate(date, format, defaults)
1109 except (ValueError, OverflowError):
1119 except (ValueError, OverflowError):
1110 pass
1120 pass
1111 else:
1121 else:
1112 break
1122 break
1113 else:
1123 else:
1114 raise Abort(_('invalid date: %r') % date)
1124 raise Abort(_('invalid date: %r') % date)
1115 # validate explicit (probably user-specified) date and
1125 # validate explicit (probably user-specified) date and
1116 # time zone offset. values must fit in signed 32 bits for
1126 # time zone offset. values must fit in signed 32 bits for
1117 # current 32-bit linux runtimes. timezones go from UTC-12
1127 # current 32-bit linux runtimes. timezones go from UTC-12
1118 # to UTC+14
1128 # to UTC+14
1119 if abs(when) > 0x7fffffff:
1129 if abs(when) > 0x7fffffff:
1120 raise Abort(_('date exceeds 32 bits: %d') % when)
1130 raise Abort(_('date exceeds 32 bits: %d') % when)
1121 if when < 0:
1131 if when < 0:
1122 raise Abort(_('negative date value: %d') % when)
1132 raise Abort(_('negative date value: %d') % when)
1123 if offset < -50400 or offset > 43200:
1133 if offset < -50400 or offset > 43200:
1124 raise Abort(_('impossible time zone offset: %d') % offset)
1134 raise Abort(_('impossible time zone offset: %d') % offset)
1125 return when, offset
1135 return when, offset
1126
1136
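# Editorial sketch (not part of the original module): a worked example of
# the "unixtime = localunixtime + offset" convention used by strdate() and
# parsedate() above. A trailing "+0200" parses to an offset of -7200, so
# local 14:00 in that zone maps back to 12:00 UTC.
#
# >>> parsedate('2013-01-05 14:00 +0200')
# (1357387200, -7200)
# >>> datestr((1357387200, -7200), format='%H:%M %1%2')
# '14:00 +0200'
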
1127 def matchdate(date):
1137 def matchdate(date):
1128 """Return a function that matches a given date match specifier
1138 """Return a function that matches a given date match specifier
1129
1139
1130 Formats include:
1140 Formats include:
1131
1141
1132 '{date}' match a given date to the accuracy provided
1142 '{date}' match a given date to the accuracy provided
1133
1143
1134 '<{date}' on or before a given date
1144 '<{date}' on or before a given date
1135
1145
1136 '>{date}' on or after a given date
1146 '>{date}' on or after a given date
1137
1147
1138 >>> p1 = parsedate("10:29:59")
1148 >>> p1 = parsedate("10:29:59")
1139 >>> p2 = parsedate("10:30:00")
1149 >>> p2 = parsedate("10:30:00")
1140 >>> p3 = parsedate("10:30:59")
1150 >>> p3 = parsedate("10:30:59")
1141 >>> p4 = parsedate("10:31:00")
1151 >>> p4 = parsedate("10:31:00")
1142 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1152 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1143 >>> f = matchdate("10:30")
1153 >>> f = matchdate("10:30")
1144 >>> f(p1[0])
1154 >>> f(p1[0])
1145 False
1155 False
1146 >>> f(p2[0])
1156 >>> f(p2[0])
1147 True
1157 True
1148 >>> f(p3[0])
1158 >>> f(p3[0])
1149 True
1159 True
1150 >>> f(p4[0])
1160 >>> f(p4[0])
1151 False
1161 False
1152 >>> f(p5[0])
1162 >>> f(p5[0])
1153 False
1163 False
1154 """
1164 """
1155
1165
1156 def lower(date):
1166 def lower(date):
1157 d = dict(mb="1", d="1")
1167 d = dict(mb="1", d="1")
1158 return parsedate(date, extendeddateformats, d)[0]
1168 return parsedate(date, extendeddateformats, d)[0]
1159
1169
1160 def upper(date):
1170 def upper(date):
1161 d = dict(mb="12", HI="23", M="59", S="59")
1171 d = dict(mb="12", HI="23", M="59", S="59")
1162 for days in ("31", "30", "29"):
1172 for days in ("31", "30", "29"):
1163 try:
1173 try:
1164 d["d"] = days
1174 d["d"] = days
1165 return parsedate(date, extendeddateformats, d)[0]
1175 return parsedate(date, extendeddateformats, d)[0]
1166 except Abort:
1176 except Abort:
1167 pass
1177 pass
1168 d["d"] = "28"
1178 d["d"] = "28"
1169 return parsedate(date, extendeddateformats, d)[0]
1179 return parsedate(date, extendeddateformats, d)[0]
1170
1180
1171 date = date.strip()
1181 date = date.strip()
1172
1182
1173 if not date:
1183 if not date:
1174 raise Abort(_("dates cannot consist entirely of whitespace"))
1184 raise Abort(_("dates cannot consist entirely of whitespace"))
1175 elif date[0] == "<":
1185 elif date[0] == "<":
1176 if not date[1:]:
1186 if not date[1:]:
1177 raise Abort(_("invalid day spec, use '<DATE'"))
1187 raise Abort(_("invalid day spec, use '<DATE'"))
1178 when = upper(date[1:])
1188 when = upper(date[1:])
1179 return lambda x: x <= when
1189 return lambda x: x <= when
1180 elif date[0] == ">":
1190 elif date[0] == ">":
1181 if not date[1:]:
1191 if not date[1:]:
1182 raise Abort(_("invalid day spec, use '>DATE'"))
1192 raise Abort(_("invalid day spec, use '>DATE'"))
1183 when = lower(date[1:])
1193 when = lower(date[1:])
1184 return lambda x: x >= when
1194 return lambda x: x >= when
1185 elif date[0] == "-":
1195 elif date[0] == "-":
1186 try:
1196 try:
1187 days = int(date[1:])
1197 days = int(date[1:])
1188 except ValueError:
1198 except ValueError:
1189 raise Abort(_("invalid day spec: %s") % date[1:])
1199 raise Abort(_("invalid day spec: %s") % date[1:])
1190 if days < 0:
1200 if days < 0:
1191 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1201 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1192 % date[1:])
1202 % date[1:])
1193 when = makedate()[0] - days * 3600 * 24
1203 when = makedate()[0] - days * 3600 * 24
1194 return lambda x: x >= when
1204 return lambda x: x >= when
1195 elif " to " in date:
1205 elif " to " in date:
1196 a, b = date.split(" to ")
1206 a, b = date.split(" to ")
1197 start, stop = lower(a), upper(b)
1207 start, stop = lower(a), upper(b)
1198 return lambda x: x >= start and x <= stop
1208 return lambda x: x >= start and x <= stop
1199 else:
1209 else:
1200 start, stop = lower(date), upper(date)
1210 start, stop = lower(date), upper(date)
1201 return lambda x: x >= start and x <= stop
1211 return lambda x: x >= start and x <= stop
1202
1212
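# Editorial sketch (not part of the original module): further matchdate
# usage beyond the doctests above.
#
# >>> since2013 = matchdate('>2013-01-01')
# >>> since2013(parsedate('2013-06-15')[0])
# True
# >>> lastmonth = matchdate('-30')     # anything within the last 30 days
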
1203 def shortuser(user):
1213 def shortuser(user):
1204 """Return a short representation of a user name or email address."""
1214 """Return a short representation of a user name or email address."""
1205 f = user.find('@')
1215 f = user.find('@')
1206 if f >= 0:
1216 if f >= 0:
1207 user = user[:f]
1217 user = user[:f]
1208 f = user.find('<')
1218 f = user.find('<')
1209 if f >= 0:
1219 if f >= 0:
1210 user = user[f + 1:]
1220 user = user[f + 1:]
1211 f = user.find(' ')
1221 f = user.find(' ')
1212 if f >= 0:
1222 if f >= 0:
1213 user = user[:f]
1223 user = user[:f]
1214 f = user.find('.')
1224 f = user.find('.')
1215 if f >= 0:
1225 if f >= 0:
1216 user = user[:f]
1226 user = user[:f]
1217 return user
1227 return user
1218
1228
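# Editorial sketch (not part of the original module): shortuser trims a
# full "Name <user@host>" string down to a bare login-like prefix.
#
# >>> shortuser('Joe Smith <joe.smith@example.com>')
# 'joe'
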
1219 def emailuser(user):
1229 def emailuser(user):
1220 """Return the user portion of an email address."""
1230 """Return the user portion of an email address."""
1221 f = user.find('@')
1231 f = user.find('@')
1222 if f >= 0:
1232 if f >= 0:
1223 user = user[:f]
1233 user = user[:f]
1224 f = user.find('<')
1234 f = user.find('<')
1225 if f >= 0:
1235 if f >= 0:
1226 user = user[f + 1:]
1236 user = user[f + 1:]
1227 return user
1237 return user
1228
1238
1229 def email(author):
1239 def email(author):
1230 '''get email of author.'''
1240 '''get email of author.'''
1231 r = author.find('>')
1241 r = author.find('>')
1232 if r == -1:
1242 if r == -1:
1233 r = None
1243 r = None
1234 return author[author.find('<') + 1:r]
1244 return author[author.find('<') + 1:r]
1235
1245
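# Editorial sketch (not part of the original module): emailuser() and
# email() applied to the same author string.
#
# >>> emailuser('Joe Smith <joe.smith@example.com>')
# 'joe.smith'
# >>> email('Joe Smith <joe.smith@example.com>')
# 'joe.smith@example.com'
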
1236 def _ellipsis(text, maxlength):
1246 def _ellipsis(text, maxlength):
1237 if len(text) <= maxlength:
1247 if len(text) <= maxlength:
1238 return text, False
1248 return text, False
1239 else:
1249 else:
1240 return "%s..." % (text[:maxlength - 3]), True
1250 return "%s..." % (text[:maxlength - 3]), True
1241
1251
1242 def ellipsis(text, maxlength=400):
1252 def ellipsis(text, maxlength=400):
1243 """Trim string to at most maxlength (default: 400) characters."""
1253 """Trim string to at most maxlength (default: 400) characters."""
1244 try:
1254 try:
1245 # use unicode to avoid splitting at an intermediate multi-byte sequence
1255 # use unicode to avoid splitting at an intermediate multi-byte sequence
1246 utext, truncated = _ellipsis(text.decode(encoding.encoding),
1256 utext, truncated = _ellipsis(text.decode(encoding.encoding),
1247 maxlength)
1257 maxlength)
1248 if not truncated:
1258 if not truncated:
1249 return text
1259 return text
1250 return utext.encode(encoding.encoding)
1260 return utext.encode(encoding.encoding)
1251 except (UnicodeDecodeError, UnicodeEncodeError):
1261 except (UnicodeDecodeError, UnicodeEncodeError):
1252 return _ellipsis(text, maxlength)[0]
1262 return _ellipsis(text, maxlength)[0]
1253
1263
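# Editorial sketch (not part of the original module): ellipsis keeps the
# result within maxlength characters, including the trailing dots.
#
# >>> ellipsis('hello world, hello world', 10)
# 'hello w...'
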
1254 _byteunits = (
1264 _byteunits = (
1255 (100, 1 << 30, _('%.0f GB')),
1265 (100, 1 << 30, _('%.0f GB')),
1256 (10, 1 << 30, _('%.1f GB')),
1266 (10, 1 << 30, _('%.1f GB')),
1257 (1, 1 << 30, _('%.2f GB')),
1267 (1, 1 << 30, _('%.2f GB')),
1258 (100, 1 << 20, _('%.0f MB')),
1268 (100, 1 << 20, _('%.0f MB')),
1259 (10, 1 << 20, _('%.1f MB')),
1269 (10, 1 << 20, _('%.1f MB')),
1260 (1, 1 << 20, _('%.2f MB')),
1270 (1, 1 << 20, _('%.2f MB')),
1261 (100, 1 << 10, _('%.0f KB')),
1271 (100, 1 << 10, _('%.0f KB')),
1262 (10, 1 << 10, _('%.1f KB')),
1272 (10, 1 << 10, _('%.1f KB')),
1263 (1, 1 << 10, _('%.2f KB')),
1273 (1, 1 << 10, _('%.2f KB')),
1264 (1, 1, _('%.0f bytes')),
1274 (1, 1, _('%.0f bytes')),
1265 )
1275 )
1266
1276
1267 def bytecount(nbytes):
1277 def bytecount(nbytes):
1268 '''return byte count formatted as readable string, with units'''
1278 '''return byte count formatted as readable string, with units'''
1269
1279
1270 for multiplier, divisor, format in _byteunits:
1280 for multiplier, divisor, format in _byteunits:
1271 if nbytes >= divisor * multiplier:
1281 if nbytes >= divisor * multiplier:
1272 return format % (nbytes / float(divisor))
1282 return format % (nbytes / float(divisor))
1273 return _byteunits[-1][2] % nbytes
1283 return _byteunits[-1][2] % nbytes
1274
1284
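# Editorial sketch (not part of the original module): bytecount picks the
# largest fitting unit and shows more decimals for smaller magnitudes.
#
# >>> bytecount(500)
# '500 bytes'
# >>> bytecount(1234567)
# '1.18 MB'
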
1275 def uirepr(s):
1285 def uirepr(s):
1276 # Avoid double backslash in Windows path repr()
1286 # Avoid double backslash in Windows path repr()
1277 return repr(s).replace('\\\\', '\\')
1287 return repr(s).replace('\\\\', '\\')
1278
1288
1279 # delay import of textwrap
1289 # delay import of textwrap
1280 def MBTextWrapper(**kwargs):
1290 def MBTextWrapper(**kwargs):
1281 class tw(textwrap.TextWrapper):
1291 class tw(textwrap.TextWrapper):
1282 """
1292 """
1283 Extend TextWrapper for width-awareness.
1293 Extend TextWrapper for width-awareness.
1284
1294
1285 Neither the number of 'bytes' in any encoding nor the number of
1295 Neither the number of 'bytes' in any encoding nor the number of
1286 'characters' is appropriate for calculating terminal columns for a given string.
1296 'characters' is appropriate for calculating terminal columns for a given string.
1287
1297
1288 Original TextWrapper implementation uses built-in 'len()' directly,
1298 Original TextWrapper implementation uses built-in 'len()' directly,
1289 so overriding is needed to use the width information of each character.
1299 so overriding is needed to use the width information of each character.
1290
1300
1291 In addition, characters classified into 'ambiguous' width are
1301 In addition, characters classified into 'ambiguous' width are
1292 treated as wide in East Asian locales, but as narrow elsewhere.
1302 treated as wide in East Asian locales, but as narrow elsewhere.
1293
1303
1294 This requires a usage decision to determine the width of such characters.
1304 This requires a usage decision to determine the width of such characters.
1295 """
1305 """
1296 def __init__(self, **kwargs):
1306 def __init__(self, **kwargs):
1297 textwrap.TextWrapper.__init__(self, **kwargs)
1307 textwrap.TextWrapper.__init__(self, **kwargs)
1298
1308
1299 # for compatibility between 2.4 and 2.6
1309 # for compatibility between 2.4 and 2.6
1300 if getattr(self, 'drop_whitespace', None) is None:
1310 if getattr(self, 'drop_whitespace', None) is None:
1301 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1311 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1302
1312
1303 def _cutdown(self, ucstr, space_left):
1313 def _cutdown(self, ucstr, space_left):
1304 l = 0
1314 l = 0
1305 colwidth = encoding.ucolwidth
1315 colwidth = encoding.ucolwidth
1306 for i in xrange(len(ucstr)):
1316 for i in xrange(len(ucstr)):
1307 l += colwidth(ucstr[i])
1317 l += colwidth(ucstr[i])
1308 if space_left < l:
1318 if space_left < l:
1309 return (ucstr[:i], ucstr[i:])
1319 return (ucstr[:i], ucstr[i:])
1310 return ucstr, ''
1320 return ucstr, ''
1311
1321
1312 # overriding of base class
1322 # overriding of base class
1313 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1323 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1314 space_left = max(width - cur_len, 1)
1324 space_left = max(width - cur_len, 1)
1315
1325
1316 if self.break_long_words:
1326 if self.break_long_words:
1317 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1327 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1318 cur_line.append(cut)
1328 cur_line.append(cut)
1319 reversed_chunks[-1] = res
1329 reversed_chunks[-1] = res
1320 elif not cur_line:
1330 elif not cur_line:
1321 cur_line.append(reversed_chunks.pop())
1331 cur_line.append(reversed_chunks.pop())
1322
1332
1323 # this overriding code is imported from TextWrapper of python 2.6
1333 # this overriding code is imported from TextWrapper of python 2.6
1324 # to calculate columns of string by 'encoding.ucolwidth()'
1334 # to calculate columns of string by 'encoding.ucolwidth()'
1325 def _wrap_chunks(self, chunks):
1335 def _wrap_chunks(self, chunks):
1326 colwidth = encoding.ucolwidth
1336 colwidth = encoding.ucolwidth
1327
1337
1328 lines = []
1338 lines = []
1329 if self.width <= 0:
1339 if self.width <= 0:
1330 raise ValueError("invalid width %r (must be > 0)" % self.width)
1340 raise ValueError("invalid width %r (must be > 0)" % self.width)
1331
1341
1332 # Arrange in reverse order so items can be efficiently popped
1342 # Arrange in reverse order so items can be efficiently popped
1333 # from a stack of chucks.
1343 # from a stack of chucks.
1334 chunks.reverse()
1344 chunks.reverse()
1335
1345
1336 while chunks:
1346 while chunks:
1337
1347
1338 # Start the list of chunks that will make up the current line.
1348 # Start the list of chunks that will make up the current line.
1339 # cur_len is just the length of all the chunks in cur_line.
1349 # cur_len is just the length of all the chunks in cur_line.
1340 cur_line = []
1350 cur_line = []
1341 cur_len = 0
1351 cur_len = 0
1342
1352
1343 # Figure out which static string will prefix this line.
1353 # Figure out which static string will prefix this line.
1344 if lines:
1354 if lines:
1345 indent = self.subsequent_indent
1355 indent = self.subsequent_indent
1346 else:
1356 else:
1347 indent = self.initial_indent
1357 indent = self.initial_indent
1348
1358
1349 # Maximum width for this line.
1359 # Maximum width for this line.
1350 width = self.width - len(indent)
1360 width = self.width - len(indent)
1351
1361
1352 # First chunk on line is whitespace -- drop it, unless this
1362 # First chunk on line is whitespace -- drop it, unless this
1353 # is the very beginning of the text (i.e. no lines started yet).
1363 # is the very beginning of the text (i.e. no lines started yet).
1354 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1364 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1355 del chunks[-1]
1365 del chunks[-1]
1356
1366
1357 while chunks:
1367 while chunks:
1358 l = colwidth(chunks[-1])
1368 l = colwidth(chunks[-1])
1359
1369
1360 # Can at least squeeze this chunk onto the current line.
1370 # Can at least squeeze this chunk onto the current line.
1361 if cur_len + l <= width:
1371 if cur_len + l <= width:
1362 cur_line.append(chunks.pop())
1372 cur_line.append(chunks.pop())
1363 cur_len += l
1373 cur_len += l
1364
1374
1365 # Nope, this line is full.
1375 # Nope, this line is full.
1366 else:
1376 else:
1367 break
1377 break
1368
1378
1369 # The current line is full, and the next chunk is too big to
1379 # The current line is full, and the next chunk is too big to
1370 # fit on *any* line (not just this one).
1380 # fit on *any* line (not just this one).
1371 if chunks and colwidth(chunks[-1]) > width:
1381 if chunks and colwidth(chunks[-1]) > width:
1372 self._handle_long_word(chunks, cur_line, cur_len, width)
1382 self._handle_long_word(chunks, cur_line, cur_len, width)
1373
1383
1374 # If the last chunk on this line is all whitespace, drop it.
1384 # If the last chunk on this line is all whitespace, drop it.
1375 if (self.drop_whitespace and
1385 if (self.drop_whitespace and
1376 cur_line and cur_line[-1].strip() == ''):
1386 cur_line and cur_line[-1].strip() == ''):
1377 del cur_line[-1]
1387 del cur_line[-1]
1378
1388
1379 # Convert current line back to a string and store it in list
1389 # Convert current line back to a string and store it in list
1380 # of all lines (return value).
1390 # of all lines (return value).
1381 if cur_line:
1391 if cur_line:
1382 lines.append(indent + ''.join(cur_line))
1392 lines.append(indent + ''.join(cur_line))
1383
1393
1384 return lines
1394 return lines
1385
1395
1386 global MBTextWrapper
1396 global MBTextWrapper
1387 MBTextWrapper = tw
1397 MBTextWrapper = tw
1388 return tw(**kwargs)
1398 return tw(**kwargs)
1389
1399
1390 def wrap(line, width, initindent='', hangindent=''):
1400 def wrap(line, width, initindent='', hangindent=''):
1391 maxindent = max(len(hangindent), len(initindent))
1401 maxindent = max(len(hangindent), len(initindent))
1392 if width <= maxindent:
1402 if width <= maxindent:
1393 # adjust for weird terminal size
1403 # adjust for weird terminal size
1394 width = max(78, maxindent + 1)
1404 width = max(78, maxindent + 1)
1395 line = line.decode(encoding.encoding, encoding.encodingmode)
1405 line = line.decode(encoding.encoding, encoding.encodingmode)
1396 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1406 initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
1397 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1407 hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
1398 wrapper = MBTextWrapper(width=width,
1408 wrapper = MBTextWrapper(width=width,
1399 initial_indent=initindent,
1409 initial_indent=initindent,
1400 subsequent_indent=hangindent)
1410 subsequent_indent=hangindent)
1401 return wrapper.fill(line).encode(encoding.encoding)
1411 return wrapper.fill(line).encode(encoding.encoding)
1402
1412
1403 def iterlines(iterator):
1413 def iterlines(iterator):
1404 for chunk in iterator:
1414 for chunk in iterator:
1405 for line in chunk.splitlines():
1415 for line in chunk.splitlines():
1406 yield line
1416 yield line
1407
1417
1408 def expandpath(path):
1418 def expandpath(path):
1409 return os.path.expanduser(os.path.expandvars(path))
1419 return os.path.expanduser(os.path.expandvars(path))
1410
1420
1411 def hgcmd():
1421 def hgcmd():
1412 """Return the command used to execute current hg
1422 """Return the command used to execute current hg
1413
1423
1414 This is different from hgexecutable() because on Windows we want
1424 This is different from hgexecutable() because on Windows we want
1415 to avoid things opening new shell windows like batch files, so we
1425 to avoid things opening new shell windows like batch files, so we
1416 get either the python call or current executable.
1426 get either the python call or current executable.
1417 """
1427 """
1418 if mainfrozen():
1428 if mainfrozen():
1419 return [sys.executable]
1429 return [sys.executable]
1420 return gethgcmd()
1430 return gethgcmd()
1421
1431
1422 def rundetached(args, condfn):
1432 def rundetached(args, condfn):
1423 """Execute the argument list in a detached process.
1433 """Execute the argument list in a detached process.
1424
1434
1425 condfn is a callable which is called repeatedly and should return
1435 condfn is a callable which is called repeatedly and should return
1426 True once the child process is known to have started successfully.
1436 True once the child process is known to have started successfully.
1427 At this point, the child process PID is returned. If the child
1437 At this point, the child process PID is returned. If the child
1428 process fails to start or finishes before condfn() evaluates to
1438 process fails to start or finishes before condfn() evaluates to
1429 True, return -1.
1439 True, return -1.
1430 """
1440 """
1431 # Windows case is easier because the child process is either
1441 # Windows case is easier because the child process is either
1432 # successfully starting and validating the condition or exiting
1442 # successfully starting and validating the condition or exiting
1433 # on failure. We just poll on its PID. On Unix, if the child
1443 # on failure. We just poll on its PID. On Unix, if the child
1434 # process fails to start, it will be left in a zombie state until
1444 # process fails to start, it will be left in a zombie state until
1435 # the parent waits on it, which we cannot do since we expect a long
1445 # the parent waits on it, which we cannot do since we expect a long
1436 # running process on success. Instead we listen for SIGCHLD telling
1446 # running process on success. Instead we listen for SIGCHLD telling
1437 # us our child process terminated.
1447 # us our child process terminated.
1438 terminated = set()
1448 terminated = set()
1439 def handler(signum, frame):
1449 def handler(signum, frame):
1440 terminated.add(os.wait())
1450 terminated.add(os.wait())
1441 prevhandler = None
1451 prevhandler = None
1442 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1452 SIGCHLD = getattr(signal, 'SIGCHLD', None)
1443 if SIGCHLD is not None:
1453 if SIGCHLD is not None:
1444 prevhandler = signal.signal(SIGCHLD, handler)
1454 prevhandler = signal.signal(SIGCHLD, handler)
1445 try:
1455 try:
1446 pid = spawndetached(args)
1456 pid = spawndetached(args)
1447 while not condfn():
1457 while not condfn():
1448 if ((pid in terminated or not testpid(pid))
1458 if ((pid in terminated or not testpid(pid))
1449 and not condfn()):
1459 and not condfn()):
1450 return -1
1460 return -1
1451 time.sleep(0.1)
1461 time.sleep(0.1)
1452 return pid
1462 return pid
1453 finally:
1463 finally:
1454 if prevhandler is not None:
1464 if prevhandler is not None:
1455 signal.signal(signal.SIGCHLD, prevhandler)
1465 signal.signal(signal.SIGCHLD, prevhandler)
1456
1466
1457 try:
1467 try:
1458 any, all = any, all
1468 any, all = any, all
1459 except NameError:
1469 except NameError:
1460 def any(iterable):
1470 def any(iterable):
1461 for i in iterable:
1471 for i in iterable:
1462 if i:
1472 if i:
1463 return True
1473 return True
1464 return False
1474 return False
1465
1475
1466 def all(iterable):
1476 def all(iterable):
1467 for i in iterable:
1477 for i in iterable:
1468 if not i:
1478 if not i:
1469 return False
1479 return False
1470 return True
1480 return True
1471
1481
1472 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1482 def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
1473 """Return the result of interpolating items in the mapping into string s.
1483 """Return the result of interpolating items in the mapping into string s.
1474
1484
1475 prefix is a single character string, or a two character string with
1485 prefix is a single character string, or a two character string with
1476 a backslash as the first character if the prefix needs to be escaped in
1486 a backslash as the first character if the prefix needs to be escaped in
1477 a regular expression.
1487 a regular expression.
1478
1488
1479 fn is an optional function that will be applied to the replacement text
1489 fn is an optional function that will be applied to the replacement text
1480 just before replacement.
1490 just before replacement.
1481
1491
1482 escape_prefix is an optional flag that allows a doubled prefix to be
1492 escape_prefix is an optional flag that allows a doubled prefix to be
1483 used as an escape for a literal prefix character.
1493 used as an escape for a literal prefix character.
1484 """
1494 """
1485 fn = fn or (lambda s: s)
1495 fn = fn or (lambda s: s)
1486 patterns = '|'.join(mapping.keys())
1496 patterns = '|'.join(mapping.keys())
1487 if escape_prefix:
1497 if escape_prefix:
1488 patterns += '|' + prefix
1498 patterns += '|' + prefix
1489 if len(prefix) > 1:
1499 if len(prefix) > 1:
1490 prefix_char = prefix[1:]
1500 prefix_char = prefix[1:]
1491 else:
1501 else:
1492 prefix_char = prefix
1502 prefix_char = prefix
1493 mapping[prefix_char] = prefix_char
1503 mapping[prefix_char] = prefix_char
1494 r = re.compile(r'%s(%s)' % (prefix, patterns))
1504 r = re.compile(r'%s(%s)' % (prefix, patterns))
1495 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1505 return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
1496
1506
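# Editorial sketch (not part of the original module): simple interpolation
# and prefix escaping with a doubled prefix.
#
# >>> interpolate('%', {'foo': 'bar'}, 'say %foo please')
# 'say bar please'
# >>> interpolate(r'\$', {'foo': 'bar'}, 'paid $foo, kept $$5',
# ...             escape_prefix=True)
# 'paid bar, kept $5'
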
1497 def getport(port):
1507 def getport(port):
1498 """Return the port for a given network service.
1508 """Return the port for a given network service.
1499
1509
1500 If port is an integer, it's returned as is. If it's a string, it's
1510 If port is an integer, it's returned as is. If it's a string, it's
1501 looked up using socket.getservbyname(). If there's no matching
1511 looked up using socket.getservbyname(). If there's no matching
1502 service, util.Abort is raised.
1512 service, util.Abort is raised.
1503 """
1513 """
1504 try:
1514 try:
1505 return int(port)
1515 return int(port)
1506 except ValueError:
1516 except ValueError:
1507 pass
1517 pass
1508
1518
1509 try:
1519 try:
1510 return socket.getservbyname(port)
1520 return socket.getservbyname(port)
1511 except socket.error:
1521 except socket.error:
1512 raise Abort(_("no port number associated with service '%s'") % port)
1522 raise Abort(_("no port number associated with service '%s'") % port)
1513
1523
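# Editorial sketch (not part of the original module): numeric strings are
# returned as integers, service names go through getservbyname() (so the
# result for names depends on the local services database).
#
# >>> getport('8080')
# 8080
# >>> getport('http')
# 80
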
1514 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1524 _booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
1515 '0': False, 'no': False, 'false': False, 'off': False,
1525 '0': False, 'no': False, 'false': False, 'off': False,
1516 'never': False}
1526 'never': False}
1517
1527
1518 def parsebool(s):
1528 def parsebool(s):
1519 """Parse s into a boolean.
1529 """Parse s into a boolean.
1520
1530
1521 If s is not a valid boolean, returns None.
1531 If s is not a valid boolean, returns None.
1522 """
1532 """
1523 return _booleans.get(s.lower(), None)
1533 return _booleans.get(s.lower(), None)
1524
1534
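# Editorial sketch (not part of the original module): parsebool is
# case-insensitive and returns None for unrecognized values.
#
# >>> parsebool('Yes'), parsebool('off'), parsebool('maybe')
# (True, False, None)
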
1525 _hexdig = '0123456789ABCDEFabcdef'
1535 _hexdig = '0123456789ABCDEFabcdef'
1526 _hextochr = dict((a + b, chr(int(a + b, 16)))
1536 _hextochr = dict((a + b, chr(int(a + b, 16)))
1527 for a in _hexdig for b in _hexdig)
1537 for a in _hexdig for b in _hexdig)
1528
1538
1529 def _urlunquote(s):
1539 def _urlunquote(s):
1530 """Decode HTTP/HTML % encoding.
1540 """Decode HTTP/HTML % encoding.
1531
1541
1532 >>> _urlunquote('abc%20def')
1542 >>> _urlunquote('abc%20def')
1533 'abc def'
1543 'abc def'
1534 """
1544 """
1535 res = s.split('%')
1545 res = s.split('%')
1536 # fastpath
1546 # fastpath
1537 if len(res) == 1:
1547 if len(res) == 1:
1538 return s
1548 return s
1539 s = res[0]
1549 s = res[0]
1540 for item in res[1:]:
1550 for item in res[1:]:
1541 try:
1551 try:
1542 s += _hextochr[item[:2]] + item[2:]
1552 s += _hextochr[item[:2]] + item[2:]
1543 except KeyError:
1553 except KeyError:
1544 s += '%' + item
1554 s += '%' + item
1545 except UnicodeDecodeError:
1555 except UnicodeDecodeError:
1546 s += unichr(int(item[:2], 16)) + item[2:]
1556 s += unichr(int(item[:2], 16)) + item[2:]
1547 return s
1557 return s
1548
1558
1549 class url(object):
1559 class url(object):
1550 r"""Reliable URL parser.
1560 r"""Reliable URL parser.
1551
1561
1552 This parses URLs and provides attributes for the following
1562 This parses URLs and provides attributes for the following
1553 components:
1563 components:
1554
1564
1555 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1565 <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>
1556
1566
1557 Missing components are set to None. The only exception is
1567 Missing components are set to None. The only exception is
1558 fragment, which is set to '' if present but empty.
1568 fragment, which is set to '' if present but empty.
1559
1569
1560 If parsefragment is False, fragment is included in query. If
1570 If parsefragment is False, fragment is included in query. If
1561 parsequery is False, query is included in path. If both are
1571 parsequery is False, query is included in path. If both are
1562 False, both fragment and query are included in path.
1572 False, both fragment and query are included in path.
1563
1573
1564 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1574 See http://www.ietf.org/rfc/rfc2396.txt for more information.
1565
1575
1566 Note that for backward compatibility reasons, bundle URLs do not
1576 Note that for backward compatibility reasons, bundle URLs do not
1567 take host names. That means 'bundle://../' has a path of '../'.
1577 take host names. That means 'bundle://../' has a path of '../'.
1568
1578
1569 Examples:
1579 Examples:
1570
1580
1571 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1581 >>> url('http://www.ietf.org/rfc/rfc2396.txt')
1572 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1582 <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
1573 >>> url('ssh://[::1]:2200//home/joe/repo')
1583 >>> url('ssh://[::1]:2200//home/joe/repo')
1574 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1584 <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
1575 >>> url('file:///home/joe/repo')
1585 >>> url('file:///home/joe/repo')
1576 <url scheme: 'file', path: '/home/joe/repo'>
1586 <url scheme: 'file', path: '/home/joe/repo'>
1577 >>> url('file:///c:/temp/foo/')
1587 >>> url('file:///c:/temp/foo/')
1578 <url scheme: 'file', path: 'c:/temp/foo/'>
1588 <url scheme: 'file', path: 'c:/temp/foo/'>
1579 >>> url('bundle:foo')
1589 >>> url('bundle:foo')
1580 <url scheme: 'bundle', path: 'foo'>
1590 <url scheme: 'bundle', path: 'foo'>
1581 >>> url('bundle://../foo')
1591 >>> url('bundle://../foo')
1582 <url scheme: 'bundle', path: '../foo'>
1592 <url scheme: 'bundle', path: '../foo'>
1583 >>> url(r'c:\foo\bar')
1593 >>> url(r'c:\foo\bar')
1584 <url path: 'c:\\foo\\bar'>
1594 <url path: 'c:\\foo\\bar'>
1585 >>> url(r'\\blah\blah\blah')
1595 >>> url(r'\\blah\blah\blah')
1586 <url path: '\\\\blah\\blah\\blah'>
1596 <url path: '\\\\blah\\blah\\blah'>
1587 >>> url(r'\\blah\blah\blah#baz')
1597 >>> url(r'\\blah\blah\blah#baz')
1588 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1598 <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>
1589
1599
1590 Authentication credentials:
1600 Authentication credentials:
1591
1601
1592 >>> url('ssh://joe:xyz@x/repo')
1602 >>> url('ssh://joe:xyz@x/repo')
1593 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1603 <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
1594 >>> url('ssh://joe@x/repo')
1604 >>> url('ssh://joe@x/repo')
1595 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1605 <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>
1596
1606
1597 Query strings and fragments:
1607 Query strings and fragments:
1598
1608
1599 >>> url('http://host/a?b#c')
1609 >>> url('http://host/a?b#c')
1600 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1610 <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
1601 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1611 >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
1602 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1612 <url scheme: 'http', host: 'host', path: 'a?b#c'>
1603 """
1613 """
1604
1614
1605 _safechars = "!~*'()+"
1615 _safechars = "!~*'()+"
1606 _safepchars = "/!~*'()+:"
1616 _safepchars = "/!~*'()+:"
1607 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1617 _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
1608
1618
1609 def __init__(self, path, parsequery=True, parsefragment=True):
1619 def __init__(self, path, parsequery=True, parsefragment=True):
1610 # We slowly chomp away at path until we have only the path left
1620 # We slowly chomp away at path until we have only the path left
1611 self.scheme = self.user = self.passwd = self.host = None
1621 self.scheme = self.user = self.passwd = self.host = None
1612 self.port = self.path = self.query = self.fragment = None
1622 self.port = self.path = self.query = self.fragment = None
1613 self._localpath = True
1623 self._localpath = True
1614 self._hostport = ''
1624 self._hostport = ''
1615 self._origpath = path
1625 self._origpath = path
1616
1626
1617 if parsefragment and '#' in path:
1627 if parsefragment and '#' in path:
1618 path, self.fragment = path.split('#', 1)
1628 path, self.fragment = path.split('#', 1)
1619 if not path:
1629 if not path:
1620 path = None
1630 path = None
1621
1631
1622 # special case for Windows drive letters and UNC paths
1632 # special case for Windows drive letters and UNC paths
1623 if hasdriveletter(path) or path.startswith(r'\\'):
1633 if hasdriveletter(path) or path.startswith(r'\\'):
1624 self.path = path
1634 self.path = path
1625 return
1635 return
1626
1636
1627 # For compatibility reasons, we can't handle bundle paths as
1637 # For compatibility reasons, we can't handle bundle paths as
1628 # normal URLS
1638 # normal URLS
1629 if path.startswith('bundle:'):
1639 if path.startswith('bundle:'):
1630 self.scheme = 'bundle'
1640 self.scheme = 'bundle'
1631 path = path[7:]
1641 path = path[7:]
1632 if path.startswith('//'):
1642 if path.startswith('//'):
1633 path = path[2:]
1643 path = path[2:]
1634 self.path = path
1644 self.path = path
1635 return
1645 return
1636
1646
1637 if self._matchscheme(path):
1647 if self._matchscheme(path):
1638 parts = path.split(':', 1)
1648 parts = path.split(':', 1)
1639 if parts[0]:
1649 if parts[0]:
1640 self.scheme, path = parts
1650 self.scheme, path = parts
1641 self._localpath = False
1651 self._localpath = False
1642
1652
1643 if not path:
1653 if not path:
1644 path = None
1654 path = None
1645 if self._localpath:
1655 if self._localpath:
1646 self.path = ''
1656 self.path = ''
1647 return
1657 return
1648 else:
1658 else:
1649 if self._localpath:
1659 if self._localpath:
1650 self.path = path
1660 self.path = path
1651 return
1661 return
1652
1662
1653 if parsequery and '?' in path:
1663 if parsequery and '?' in path:
1654 path, self.query = path.split('?', 1)
1664 path, self.query = path.split('?', 1)
1655 if not path:
1665 if not path:
1656 path = None
1666 path = None
1657 if not self.query:
1667 if not self.query:
1658 self.query = None
1668 self.query = None
1659
1669
1660 # // is required to specify a host/authority
1670 # // is required to specify a host/authority
1661 if path and path.startswith('//'):
1671 if path and path.startswith('//'):
1662 parts = path[2:].split('/', 1)
1672 parts = path[2:].split('/', 1)
1663 if len(parts) > 1:
1673 if len(parts) > 1:
1664 self.host, path = parts
1674 self.host, path = parts
1665 path = path
1675 path = path
1666 else:
1676 else:
1667 self.host = parts[0]
1677 self.host = parts[0]
1668 path = None
1678 path = None
1669 if not self.host:
1679 if not self.host:
1670 self.host = None
1680 self.host = None
1671 # path of file:///d is /d
1681 # path of file:///d is /d
1672 # path of file:///d:/ is d:/, not /d:/
1682 # path of file:///d:/ is d:/, not /d:/
1673 if path and not hasdriveletter(path):
1683 if path and not hasdriveletter(path):
1674 path = '/' + path
1684 path = '/' + path
1675
1685
1676 if self.host and '@' in self.host:
1686 if self.host and '@' in self.host:
1677 self.user, self.host = self.host.rsplit('@', 1)
1687 self.user, self.host = self.host.rsplit('@', 1)
1678 if ':' in self.user:
1688 if ':' in self.user:
1679 self.user, self.passwd = self.user.split(':', 1)
1689 self.user, self.passwd = self.user.split(':', 1)
1680 if not self.host:
1690 if not self.host:
1681 self.host = None
1691 self.host = None
1682
1692
1683 # Don't split on colons in IPv6 addresses without ports
1693 # Don't split on colons in IPv6 addresses without ports
1684 if (self.host and ':' in self.host and
1694 if (self.host and ':' in self.host and
1685 not (self.host.startswith('[') and self.host.endswith(']'))):
1695 not (self.host.startswith('[') and self.host.endswith(']'))):
1686 self._hostport = self.host
1696 self._hostport = self.host
1687 self.host, self.port = self.host.rsplit(':', 1)
1697 self.host, self.port = self.host.rsplit(':', 1)
1688 if not self.host:
1698 if not self.host:
1689 self.host = None
1699 self.host = None
1690
1700
1691 if (self.host and self.scheme == 'file' and
1701 if (self.host and self.scheme == 'file' and
1692 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1702 self.host not in ('localhost', '127.0.0.1', '[::1]')):
1693 raise Abort(_('file:// URLs can only refer to localhost'))
1703 raise Abort(_('file:// URLs can only refer to localhost'))
1694
1704
1695 self.path = path
1705 self.path = path
1696
1706
1697 # leave the query string escaped
1707 # leave the query string escaped
1698 for a in ('user', 'passwd', 'host', 'port',
1708 for a in ('user', 'passwd', 'host', 'port',
1699 'path', 'fragment'):
1709 'path', 'fragment'):
1700 v = getattr(self, a)
1710 v = getattr(self, a)
1701 if v is not None:
1711 if v is not None:
1702 setattr(self, a, _urlunquote(v))
1712 setattr(self, a, _urlunquote(v))
1703
1713
1704 def __repr__(self):
1714 def __repr__(self):
1705 attrs = []
1715 attrs = []
1706 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1716 for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
1707 'query', 'fragment'):
1717 'query', 'fragment'):
1708 v = getattr(self, a)
1718 v = getattr(self, a)
1709 if v is not None:
1719 if v is not None:
1710 attrs.append('%s: %r' % (a, v))
1720 attrs.append('%s: %r' % (a, v))
1711 return '<url %s>' % ', '.join(attrs)
1721 return '<url %s>' % ', '.join(attrs)
1712
1722
1713 def __str__(self):
1723 def __str__(self):
1714 r"""Join the URL's components back into a URL string.
1724 r"""Join the URL's components back into a URL string.
1715
1725
1716 Examples:
1726 Examples:
1717
1727
1718 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1728 >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
1719 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1729 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
1720 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1730 >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
1721 'http://user:pw@host:80/?foo=bar&baz=42'
1731 'http://user:pw@host:80/?foo=bar&baz=42'
1722 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1732 >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
1723 'http://user:pw@host:80/?foo=bar%3dbaz'
1733 'http://user:pw@host:80/?foo=bar%3dbaz'
1724 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1734 >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
1725 'ssh://user:pw@[::1]:2200//home/joe#'
1735 'ssh://user:pw@[::1]:2200//home/joe#'
1726 >>> str(url('http://localhost:80//'))
1736 >>> str(url('http://localhost:80//'))
1727 'http://localhost:80//'
1737 'http://localhost:80//'
1728 >>> str(url('http://localhost:80/'))
1738 >>> str(url('http://localhost:80/'))
1729 'http://localhost:80/'
1739 'http://localhost:80/'
1730 >>> str(url('http://localhost:80'))
1740 >>> str(url('http://localhost:80'))
1731 'http://localhost:80/'
1741 'http://localhost:80/'
1732 >>> str(url('bundle:foo'))
1742 >>> str(url('bundle:foo'))
1733 'bundle:foo'
1743 'bundle:foo'
1734 >>> str(url('bundle://../foo'))
1744 >>> str(url('bundle://../foo'))
1735 'bundle:../foo'
1745 'bundle:../foo'
1736 >>> str(url('path'))
1746 >>> str(url('path'))
1737 'path'
1747 'path'
1738 >>> str(url('file:///tmp/foo/bar'))
1748 >>> str(url('file:///tmp/foo/bar'))
1739 'file:///tmp/foo/bar'
1749 'file:///tmp/foo/bar'
1740 >>> str(url('file:///c:/tmp/foo/bar'))
1750 >>> str(url('file:///c:/tmp/foo/bar'))
1741 'file:///c:/tmp/foo/bar'
1751 'file:///c:/tmp/foo/bar'
1742 >>> print url(r'bundle:foo\bar')
1752 >>> print url(r'bundle:foo\bar')
1743 bundle:foo\bar
1753 bundle:foo\bar
1744 """
1754 """
1745 if self._localpath:
1755 if self._localpath:
1746 s = self.path
1756 s = self.path
1747 if self.scheme == 'bundle':
1757 if self.scheme == 'bundle':
1748 s = 'bundle:' + s
1758 s = 'bundle:' + s
1749 if self.fragment:
1759 if self.fragment:
1750 s += '#' + self.fragment
1760 s += '#' + self.fragment
1751 return s
1761 return s
1752
1762
1753 s = self.scheme + ':'
1763 s = self.scheme + ':'
1754 if self.user or self.passwd or self.host:
1764 if self.user or self.passwd or self.host:
1755 s += '//'
1765 s += '//'
1756 elif self.scheme and (not self.path or self.path.startswith('/')
1766 elif self.scheme and (not self.path or self.path.startswith('/')
1757 or hasdriveletter(self.path)):
1767 or hasdriveletter(self.path)):
1758 s += '//'
1768 s += '//'
1759 if hasdriveletter(self.path):
1769 if hasdriveletter(self.path):
1760 s += '/'
1770 s += '/'
1761 if self.user:
1771 if self.user:
1762 s += urllib.quote(self.user, safe=self._safechars)
1772 s += urllib.quote(self.user, safe=self._safechars)
1763 if self.passwd:
1773 if self.passwd:
1764 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
1774 s += ':' + urllib.quote(self.passwd, safe=self._safechars)
1765 if self.user or self.passwd:
1775 if self.user or self.passwd:
1766 s += '@'
1776 s += '@'
1767 if self.host:
1777 if self.host:
1768 if not (self.host.startswith('[') and self.host.endswith(']')):
1778 if not (self.host.startswith('[') and self.host.endswith(']')):
1769 s += urllib.quote(self.host)
1779 s += urllib.quote(self.host)
1770 else:
1780 else:
1771 s += self.host
1781 s += self.host
1772 if self.port:
1782 if self.port:
1773 s += ':' + urllib.quote(self.port)
1783 s += ':' + urllib.quote(self.port)
1774 if self.host:
1784 if self.host:
1775 s += '/'
1785 s += '/'
1776 if self.path:
1786 if self.path:
1777 # TODO: similar to the query string, we should not unescape the
1787 # TODO: similar to the query string, we should not unescape the
1778 # path when we store it, the path might contain '%2f' = '/',
1788 # path when we store it, the path might contain '%2f' = '/',
1779 # which we should *not* escape.
1789 # which we should *not* escape.
1780 s += urllib.quote(self.path, safe=self._safepchars)
1790 s += urllib.quote(self.path, safe=self._safepchars)
1781 if self.query:
1791 if self.query:
1782 # we store the query in escaped form.
1792 # we store the query in escaped form.
1783 s += '?' + self.query
1793 s += '?' + self.query
1784 if self.fragment is not None:
1794 if self.fragment is not None:
1785 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
1795 s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
1786 return s
1796 return s
1787
1797
1788 def authinfo(self):
1798 def authinfo(self):
1789 user, passwd = self.user, self.passwd
1799 user, passwd = self.user, self.passwd
1790 try:
1800 try:
1791 self.user, self.passwd = None, None
1801 self.user, self.passwd = None, None
1792 s = str(self)
1802 s = str(self)
1793 finally:
1803 finally:
1794 self.user, self.passwd = user, passwd
1804 self.user, self.passwd = user, passwd
1795 if not self.user:
1805 if not self.user:
1796 return (s, None)
1806 return (s, None)
1797 # authinfo[1] is passed to urllib2 password manager, and its
1807 # authinfo[1] is passed to urllib2 password manager, and its
1798 # URIs must not contain credentials. The host is passed in the
1808 # URIs must not contain credentials. The host is passed in the
1799 # URIs list because Python < 2.4.3 uses only that to search for
1809 # URIs list because Python < 2.4.3 uses only that to search for
1800 # a password.
1810 # a password.
1801 return (s, (None, (s, self.host),
1811 return (s, (None, (s, self.host),
1802 self.user, self.passwd or ''))
1812 self.user, self.passwd or ''))
1803
1813
1804 def isabs(self):
1814 def isabs(self):
1805 if self.scheme and self.scheme != 'file':
1815 if self.scheme and self.scheme != 'file':
1806 return True # remote URL
1816 return True # remote URL
1807 if hasdriveletter(self.path):
1817 if hasdriveletter(self.path):
1808 return True # absolute for our purposes - can't be joined()
1818 return True # absolute for our purposes - can't be joined()
1809 if self.path.startswith(r'\\'):
1819 if self.path.startswith(r'\\'):
1810 return True # Windows UNC path
1820 return True # Windows UNC path
1811 if self.path.startswith('/'):
1821 if self.path.startswith('/'):
1812 return True # POSIX-style
1822 return True # POSIX-style
1813 return False
1823 return False
1814
1824
1815 def localpath(self):
1825 def localpath(self):
1816 if self.scheme == 'file' or self.scheme == 'bundle':
1826 if self.scheme == 'file' or self.scheme == 'bundle':
1817 path = self.path or '/'
1827 path = self.path or '/'
1818 # For Windows, we need to promote hosts containing drive
1828 # For Windows, we need to promote hosts containing drive
1819 # letters to paths with drive letters.
1829 # letters to paths with drive letters.
1820 if hasdriveletter(self._hostport):
1830 if hasdriveletter(self._hostport):
1821 path = self._hostport + '/' + self.path
1831 path = self._hostport + '/' + self.path
1822 elif (self.host is not None and self.path
1832 elif (self.host is not None and self.path
1823 and not hasdriveletter(path)):
1833 and not hasdriveletter(path)):
1824 path = '/' + path
1834 path = '/' + path
1825 return path
1835 return path
1826 return self._origpath
1836 return self._origpath
1827
1837
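# Editorial sketch (not part of the original module): authinfo() strips the
# credentials from the URL string and returns them separately, in the form
# expected by urllib2 password managers.
#
# >>> url('http://joe:xyz@example.com/repo').authinfo()
# ('http://example.com/repo', (None, ('http://example.com/repo', 'example.com'), 'joe', 'xyz'))
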
1828 def hasscheme(path):
1838 def hasscheme(path):
1829 return bool(url(path).scheme)
1839 return bool(url(path).scheme)
1830
1840
1831 def hasdriveletter(path):
1841 def hasdriveletter(path):
1832 return path and path[1:2] == ':' and path[0:1].isalpha()
1842 return path and path[1:2] == ':' and path[0:1].isalpha()
1833
1843
1834 def urllocalpath(path):
1844 def urllocalpath(path):
1835 return url(path, parsequery=False, parsefragment=False).localpath()
1845 return url(path, parsequery=False, parsefragment=False).localpath()
1836
1846
1837 def hidepassword(u):
1847 def hidepassword(u):
1838 '''hide user credential in a url string'''
1848 '''hide user credential in a url string'''
1839 u = url(u)
1849 u = url(u)
1840 if u.passwd:
1850 if u.passwd:
1841 u.passwd = '***'
1851 u.passwd = '***'
1842 return str(u)
1852 return str(u)
1843
1853
1844 def removeauth(u):
1854 def removeauth(u):
1845 '''remove all authentication information from a url string'''
1855 '''remove all authentication information from a url string'''
1846 u = url(u)
1856 u = url(u)
1847 u.user = u.passwd = None
1857 u.user = u.passwd = None
1848 return str(u)
1858 return str(u)
1849
1859
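# Editorial sketch (not part of the original module):
#
# >>> hidepassword('http://joe:secret@example.com/repo')
# 'http://joe:***@example.com/repo'
# >>> removeauth('http://joe:secret@example.com/repo')
# 'http://example.com/repo'
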
1850 def isatty(fd):
1860 def isatty(fd):
1851 try:
1861 try:
1852 return fd.isatty()
1862 return fd.isatty()
1853 except AttributeError:
1863 except AttributeError:
1854 return False
1864 return False