icasefs: avoid normcase()-ing in util.fspath() for efficiency...
FUJIWARA Katsunori
r15670:d6c19cfa stable
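
The hunks below touch mercurial/dirstate.py (and quote surrounding context from mercurial/util.py): the dirstate gains a cached _normroot property holding util.normcase(self._root), and _normalize() now hands the already-normalized name and root to util.fspath() instead of the raw path and root, so fspath() no longer needs to normcase() its arguments on every call. A minimal, self-contained sketch of the idea follows; FoldCache and _lookup are illustrative names, not Mercurial's actual implementation.

    import os

    class FoldCache(object):
        '''illustrative stand-in for the dirstate fold map'''
        def __init__(self, root):
            self._root = root
            self._normroot = os.path.normcase(root)  # normalized once, reused
            self._foldmap = {}

        def normalize(self, path):
            normed = os.path.normcase(path)
            folded = self._foldmap.get(normed)
            if folded is None:
                # hand the pre-normalized name/root to the expensive lookup,
                # so it does not have to call normcase() again
                folded = self._foldmap.setdefault(
                    normed, self._lookup(normed, self._normroot))
            return folded

        def _lookup(self, normed, normroot):
            # placeholder for util.fspath(): recover the on-disk spelling
            return normed
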
@@ -1,726 +1,730 b''
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import scmutil, util, ignore, osutil, parsers, encoding
10 import scmutil, util, ignore, osutil, parsers, encoding
11 import struct, os, stat, errno
11 import struct, os, stat, errno
12 import cStringIO
12 import cStringIO
13
13
14 _format = ">cllll"
14 _format = ">cllll"
15 propertycache = util.propertycache
15 propertycache = util.propertycache
16
16
17 def _finddirs(path):
17 def _finddirs(path):
18 pos = path.rfind('/')
18 pos = path.rfind('/')
19 while pos != -1:
19 while pos != -1:
20 yield path[:pos]
20 yield path[:pos]
21 pos = path.rfind('/', 0, pos)
21 pos = path.rfind('/', 0, pos)
22
22
23 def _incdirs(dirs, path):
23 def _incdirs(dirs, path):
24 for base in _finddirs(path):
24 for base in _finddirs(path):
25 if base in dirs:
25 if base in dirs:
26 dirs[base] += 1
26 dirs[base] += 1
27 return
27 return
28 dirs[base] = 1
28 dirs[base] = 1
29
29
30 def _decdirs(dirs, path):
30 def _decdirs(dirs, path):
31 for base in _finddirs(path):
31 for base in _finddirs(path):
32 if dirs[base] > 1:
32 if dirs[base] > 1:
33 dirs[base] -= 1
33 dirs[base] -= 1
34 return
34 return
35 del dirs[base]
35 del dirs[base]
36
36
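
For reference, the three module-level helpers above keep a per-directory reference count of tracked files. A quick, hedged example, assuming _finddirs, _incdirs and _decdirs are in scope (e.g. imported from mercurial.dirstate):

    list(_finddirs('a/b/c'))    # ['a/b', 'a'] - ancestors, deepest first

    dirs = {}
    _incdirs(dirs, 'a/b/c')     # dirs == {'a/b': 1, 'a': 1}
    _incdirs(dirs, 'a/b/d')     # dirs == {'a/b': 2, 'a': 1} (returns after the first bump)
    _decdirs(dirs, 'a/b/d')     # back to {'a/b': 1, 'a': 1}
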
37 class dirstate(object):
37 class dirstate(object):
38
38
39 def __init__(self, opener, ui, root, validate):
39 def __init__(self, opener, ui, root, validate):
40 '''Create a new dirstate object.
40 '''Create a new dirstate object.
41
41
42 opener is an open()-like callable that can be used to open the
42 opener is an open()-like callable that can be used to open the
43 dirstate file; root is the root of the directory tracked by
43 dirstate file; root is the root of the directory tracked by
44 the dirstate.
44 the dirstate.
45 '''
45 '''
46 self._opener = opener
46 self._opener = opener
47 self._validate = validate
47 self._validate = validate
48 self._root = root
48 self._root = root
49 self._rootdir = os.path.join(root, '')
49 self._rootdir = os.path.join(root, '')
50 self._dirty = False
50 self._dirty = False
51 self._dirtypl = False
51 self._dirtypl = False
52 self._lastnormaltime = None
52 self._lastnormaltime = None
53 self._ui = ui
53 self._ui = ui
54
54
55 @propertycache
55 @propertycache
56 def _map(self):
56 def _map(self):
57 '''Return the dirstate contents as a map from filename to
57 '''Return the dirstate contents as a map from filename to
58 (state, mode, size, time).'''
58 (state, mode, size, time).'''
59 self._read()
59 self._read()
60 return self._map
60 return self._map
61
61
62 @propertycache
62 @propertycache
63 def _copymap(self):
63 def _copymap(self):
64 self._read()
64 self._read()
65 return self._copymap
65 return self._copymap
66
66
67 @propertycache
67 @propertycache
68 def _normroot(self):
69 return util.normcase(self._root)
70
71 @propertycache
68 def _foldmap(self):
72 def _foldmap(self):
69 f = {}
73 f = {}
70 for name in self._map:
74 for name in self._map:
71 f[util.normcase(name)] = name
75 f[util.normcase(name)] = name
72 f['.'] = '.' # prevents useless util.fspath() invocation
76 f['.'] = '.' # prevents useless util.fspath() invocation
73 return f
77 return f
74
78
75 @propertycache
79 @propertycache
76 def _branch(self):
80 def _branch(self):
77 try:
81 try:
78 return self._opener.read("branch").strip() or "default"
82 return self._opener.read("branch").strip() or "default"
79 except IOError:
83 except IOError:
80 return "default"
84 return "default"
81
85
82 @propertycache
86 @propertycache
83 def _pl(self):
87 def _pl(self):
84 try:
88 try:
85 fp = self._opener("dirstate")
89 fp = self._opener("dirstate")
86 st = fp.read(40)
90 st = fp.read(40)
87 fp.close()
91 fp.close()
88 l = len(st)
92 l = len(st)
89 if l == 40:
93 if l == 40:
90 return st[:20], st[20:40]
94 return st[:20], st[20:40]
91 elif l > 0 and l < 40:
95 elif l > 0 and l < 40:
92 raise util.Abort(_('working directory state appears damaged!'))
96 raise util.Abort(_('working directory state appears damaged!'))
93 except IOError, err:
97 except IOError, err:
94 if err.errno != errno.ENOENT:
98 if err.errno != errno.ENOENT:
95 raise
99 raise
96 return [nullid, nullid]
100 return [nullid, nullid]
97
101
98 @propertycache
102 @propertycache
99 def _dirs(self):
103 def _dirs(self):
100 dirs = {}
104 dirs = {}
101 for f, s in self._map.iteritems():
105 for f, s in self._map.iteritems():
102 if s[0] != 'r':
106 if s[0] != 'r':
103 _incdirs(dirs, f)
107 _incdirs(dirs, f)
104 return dirs
108 return dirs
105
109
106 @propertycache
110 @propertycache
107 def _ignore(self):
111 def _ignore(self):
108 files = [self._join('.hgignore')]
112 files = [self._join('.hgignore')]
109 for name, path in self._ui.configitems("ui"):
113 for name, path in self._ui.configitems("ui"):
110 if name == 'ignore' or name.startswith('ignore.'):
114 if name == 'ignore' or name.startswith('ignore.'):
111 files.append(util.expandpath(path))
115 files.append(util.expandpath(path))
112 return ignore.ignore(self._root, files, self._ui.warn)
116 return ignore.ignore(self._root, files, self._ui.warn)
113
117
114 @propertycache
118 @propertycache
115 def _slash(self):
119 def _slash(self):
116 return self._ui.configbool('ui', 'slash') and os.sep != '/'
120 return self._ui.configbool('ui', 'slash') and os.sep != '/'
117
121
118 @propertycache
122 @propertycache
119 def _checklink(self):
123 def _checklink(self):
120 return util.checklink(self._root)
124 return util.checklink(self._root)
121
125
122 @propertycache
126 @propertycache
123 def _checkexec(self):
127 def _checkexec(self):
124 return util.checkexec(self._root)
128 return util.checkexec(self._root)
125
129
126 @propertycache
130 @propertycache
127 def _checkcase(self):
131 def _checkcase(self):
128 return not util.checkcase(self._join('.hg'))
132 return not util.checkcase(self._join('.hg'))
129
133
130 def _join(self, f):
134 def _join(self, f):
131 # much faster than os.path.join()
135 # much faster than os.path.join()
132 # it's safe because f is always a relative path
136 # it's safe because f is always a relative path
133 return self._rootdir + f
137 return self._rootdir + f
134
138
135 def flagfunc(self, buildfallback):
139 def flagfunc(self, buildfallback):
136 if self._checklink and self._checkexec:
140 if self._checklink and self._checkexec:
137 def f(x):
141 def f(x):
138 p = self._join(x)
142 p = self._join(x)
139 if os.path.islink(p):
143 if os.path.islink(p):
140 return 'l'
144 return 'l'
141 if util.isexec(p):
145 if util.isexec(p):
142 return 'x'
146 return 'x'
143 return ''
147 return ''
144 return f
148 return f
145
149
146 fallback = buildfallback()
150 fallback = buildfallback()
147 if self._checklink:
151 if self._checklink:
148 def f(x):
152 def f(x):
149 if os.path.islink(self._join(x)):
153 if os.path.islink(self._join(x)):
150 return 'l'
154 return 'l'
151 if 'x' in fallback(x):
155 if 'x' in fallback(x):
152 return 'x'
156 return 'x'
153 return ''
157 return ''
154 return f
158 return f
155 if self._checkexec:
159 if self._checkexec:
156 def f(x):
160 def f(x):
157 if 'l' in fallback(x):
161 if 'l' in fallback(x):
158 return 'l'
162 return 'l'
159 if util.isexec(self._join(x)):
163 if util.isexec(self._join(x)):
160 return 'x'
164 return 'x'
161 return ''
165 return ''
162 return f
166 return f
163 else:
167 else:
164 return fallback
168 return fallback
165
169
166 def getcwd(self):
170 def getcwd(self):
167 cwd = os.getcwd()
171 cwd = os.getcwd()
168 if cwd == self._root:
172 if cwd == self._root:
169 return ''
173 return ''
170 # self._root ends with a path separator if self._root is '/' or 'C:\'
174 # self._root ends with a path separator if self._root is '/' or 'C:\'
171 rootsep = self._root
175 rootsep = self._root
172 if not util.endswithsep(rootsep):
176 if not util.endswithsep(rootsep):
173 rootsep += os.sep
177 rootsep += os.sep
174 if cwd.startswith(rootsep):
178 if cwd.startswith(rootsep):
175 return cwd[len(rootsep):]
179 return cwd[len(rootsep):]
176 else:
180 else:
177 # we're outside the repo. return an absolute path.
181 # we're outside the repo. return an absolute path.
178 return cwd
182 return cwd
179
183
180 def pathto(self, f, cwd=None):
184 def pathto(self, f, cwd=None):
181 if cwd is None:
185 if cwd is None:
182 cwd = self.getcwd()
186 cwd = self.getcwd()
183 path = util.pathto(self._root, cwd, f)
187 path = util.pathto(self._root, cwd, f)
184 if self._slash:
188 if self._slash:
185 return util.normpath(path)
189 return util.normpath(path)
186 return path
190 return path
187
191
188 def __getitem__(self, key):
192 def __getitem__(self, key):
189 '''Return the current state of key (a filename) in the dirstate.
193 '''Return the current state of key (a filename) in the dirstate.
190
194
191 States are:
195 States are:
192 n normal
196 n normal
193 m needs merging
197 m needs merging
194 r marked for removal
198 r marked for removal
195 a marked for addition
199 a marked for addition
196 ? not tracked
200 ? not tracked
197 '''
201 '''
198 return self._map.get(key, ("?",))[0]
202 return self._map.get(key, ("?",))[0]
199
203
200 def __contains__(self, key):
204 def __contains__(self, key):
201 return key in self._map
205 return key in self._map
202
206
203 def __iter__(self):
207 def __iter__(self):
204 for x in sorted(self._map):
208 for x in sorted(self._map):
205 yield x
209 yield x
206
210
207 def parents(self):
211 def parents(self):
208 return [self._validate(p) for p in self._pl]
212 return [self._validate(p) for p in self._pl]
209
213
210 def p1(self):
214 def p1(self):
211 return self._validate(self._pl[0])
215 return self._validate(self._pl[0])
212
216
213 def p2(self):
217 def p2(self):
214 return self._validate(self._pl[1])
218 return self._validate(self._pl[1])
215
219
216 def branch(self):
220 def branch(self):
217 return encoding.tolocal(self._branch)
221 return encoding.tolocal(self._branch)
218
222
219 def setparents(self, p1, p2=nullid):
223 def setparents(self, p1, p2=nullid):
220 self._dirty = self._dirtypl = True
224 self._dirty = self._dirtypl = True
221 self._pl = p1, p2
225 self._pl = p1, p2
222
226
223 def setbranch(self, branch):
227 def setbranch(self, branch):
224 if branch in ['tip', '.', 'null']:
228 if branch in ['tip', '.', 'null']:
225 raise util.Abort(_('the name \'%s\' is reserved') % branch)
229 raise util.Abort(_('the name \'%s\' is reserved') % branch)
226 self._branch = encoding.fromlocal(branch)
230 self._branch = encoding.fromlocal(branch)
227 self._opener.write("branch", self._branch + '\n')
231 self._opener.write("branch", self._branch + '\n')
228
232
229 def _read(self):
233 def _read(self):
230 self._map = {}
234 self._map = {}
231 self._copymap = {}
235 self._copymap = {}
232 try:
236 try:
233 st = self._opener.read("dirstate")
237 st = self._opener.read("dirstate")
234 except IOError, err:
238 except IOError, err:
235 if err.errno != errno.ENOENT:
239 if err.errno != errno.ENOENT:
236 raise
240 raise
237 return
241 return
238 if not st:
242 if not st:
239 return
243 return
240
244
241 p = parsers.parse_dirstate(self._map, self._copymap, st)
245 p = parsers.parse_dirstate(self._map, self._copymap, st)
242 if not self._dirtypl:
246 if not self._dirtypl:
243 self._pl = p
247 self._pl = p
244
248
245 def invalidate(self):
249 def invalidate(self):
246 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
250 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
247 "_ignore"):
251 "_ignore"):
248 if a in self.__dict__:
252 if a in self.__dict__:
249 delattr(self, a)
253 delattr(self, a)
250 self._lastnormaltime = None
254 self._lastnormaltime = None
251 self._dirty = False
255 self._dirty = False
252
256
253 def copy(self, source, dest):
257 def copy(self, source, dest):
254 """Mark dest as a copy of source. Unmark dest if source is None."""
258 """Mark dest as a copy of source. Unmark dest if source is None."""
255 if source == dest:
259 if source == dest:
256 return
260 return
257 self._dirty = True
261 self._dirty = True
258 if source is not None:
262 if source is not None:
259 self._copymap[dest] = source
263 self._copymap[dest] = source
260 elif dest in self._copymap:
264 elif dest in self._copymap:
261 del self._copymap[dest]
265 del self._copymap[dest]
262
266
263 def copied(self, file):
267 def copied(self, file):
264 return self._copymap.get(file, None)
268 return self._copymap.get(file, None)
265
269
266 def copies(self):
270 def copies(self):
267 return self._copymap
271 return self._copymap
268
272
269 def _droppath(self, f):
273 def _droppath(self, f):
270 if self[f] not in "?r" and "_dirs" in self.__dict__:
274 if self[f] not in "?r" and "_dirs" in self.__dict__:
271 _decdirs(self._dirs, f)
275 _decdirs(self._dirs, f)
272
276
273 def _addpath(self, f, check=False):
277 def _addpath(self, f, check=False):
274 oldstate = self[f]
278 oldstate = self[f]
275 if check or oldstate == "r":
279 if check or oldstate == "r":
276 scmutil.checkfilename(f)
280 scmutil.checkfilename(f)
277 if f in self._dirs:
281 if f in self._dirs:
278 raise util.Abort(_('directory %r already in dirstate') % f)
282 raise util.Abort(_('directory %r already in dirstate') % f)
279 # shadows
283 # shadows
280 for d in _finddirs(f):
284 for d in _finddirs(f):
281 if d in self._dirs:
285 if d in self._dirs:
282 break
286 break
283 if d in self._map and self[d] != 'r':
287 if d in self._map and self[d] != 'r':
284 raise util.Abort(
288 raise util.Abort(
285 _('file %r in dirstate clashes with %r') % (d, f))
289 _('file %r in dirstate clashes with %r') % (d, f))
286 if oldstate in "?r" and "_dirs" in self.__dict__:
290 if oldstate in "?r" and "_dirs" in self.__dict__:
287 _incdirs(self._dirs, f)
291 _incdirs(self._dirs, f)
288
292
289 def normal(self, f):
293 def normal(self, f):
290 '''Mark a file normal and clean.'''
294 '''Mark a file normal and clean.'''
291 self._dirty = True
295 self._dirty = True
292 self._addpath(f)
296 self._addpath(f)
293 s = os.lstat(self._join(f))
297 s = os.lstat(self._join(f))
294 mtime = int(s.st_mtime)
298 mtime = int(s.st_mtime)
295 self._map[f] = ('n', s.st_mode, s.st_size, mtime)
299 self._map[f] = ('n', s.st_mode, s.st_size, mtime)
296 if f in self._copymap:
300 if f in self._copymap:
297 del self._copymap[f]
301 del self._copymap[f]
298 if mtime > self._lastnormaltime:
302 if mtime > self._lastnormaltime:
299 # Remember the most recent modification timeslot for status(),
303 # Remember the most recent modification timeslot for status(),
300 # to make sure we won't miss future size-preserving file content
304 # to make sure we won't miss future size-preserving file content
301 # modifications that happen within the same timeslot.
305 # modifications that happen within the same timeslot.
302 self._lastnormaltime = mtime
306 self._lastnormaltime = mtime
303
307
304 def normallookup(self, f):
308 def normallookup(self, f):
305 '''Mark a file normal, but possibly dirty.'''
309 '''Mark a file normal, but possibly dirty.'''
306 if self._pl[1] != nullid and f in self._map:
310 if self._pl[1] != nullid and f in self._map:
307 # if there is a merge going on and the file was either
311 # if there is a merge going on and the file was either
308 # in state 'm' (-1) or coming from other parent (-2) before
312 # in state 'm' (-1) or coming from other parent (-2) before
309 # being removed, restore that state.
313 # being removed, restore that state.
310 entry = self._map[f]
314 entry = self._map[f]
311 if entry[0] == 'r' and entry[2] in (-1, -2):
315 if entry[0] == 'r' and entry[2] in (-1, -2):
312 source = self._copymap.get(f)
316 source = self._copymap.get(f)
313 if entry[2] == -1:
317 if entry[2] == -1:
314 self.merge(f)
318 self.merge(f)
315 elif entry[2] == -2:
319 elif entry[2] == -2:
316 self.otherparent(f)
320 self.otherparent(f)
317 if source:
321 if source:
318 self.copy(source, f)
322 self.copy(source, f)
319 return
323 return
320 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
324 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
321 return
325 return
322 self._dirty = True
326 self._dirty = True
323 self._addpath(f)
327 self._addpath(f)
324 self._map[f] = ('n', 0, -1, -1)
328 self._map[f] = ('n', 0, -1, -1)
325 if f in self._copymap:
329 if f in self._copymap:
326 del self._copymap[f]
330 del self._copymap[f]
327
331
328 def otherparent(self, f):
332 def otherparent(self, f):
329 '''Mark as coming from the other parent, always dirty.'''
333 '''Mark as coming from the other parent, always dirty.'''
330 if self._pl[1] == nullid:
334 if self._pl[1] == nullid:
331 raise util.Abort(_("setting %r to other parent "
335 raise util.Abort(_("setting %r to other parent "
332 "only allowed in merges") % f)
336 "only allowed in merges") % f)
333 self._dirty = True
337 self._dirty = True
334 self._addpath(f)
338 self._addpath(f)
335 self._map[f] = ('n', 0, -2, -1)
339 self._map[f] = ('n', 0, -2, -1)
336 if f in self._copymap:
340 if f in self._copymap:
337 del self._copymap[f]
341 del self._copymap[f]
338
342
339 def add(self, f):
343 def add(self, f):
340 '''Mark a file added.'''
344 '''Mark a file added.'''
341 self._dirty = True
345 self._dirty = True
342 self._addpath(f, True)
346 self._addpath(f, True)
343 self._map[f] = ('a', 0, -1, -1)
347 self._map[f] = ('a', 0, -1, -1)
344 if f in self._copymap:
348 if f in self._copymap:
345 del self._copymap[f]
349 del self._copymap[f]
346
350
347 def remove(self, f):
351 def remove(self, f):
348 '''Mark a file removed.'''
352 '''Mark a file removed.'''
349 self._dirty = True
353 self._dirty = True
350 self._droppath(f)
354 self._droppath(f)
351 size = 0
355 size = 0
352 if self._pl[1] != nullid and f in self._map:
356 if self._pl[1] != nullid and f in self._map:
353 # backup the previous state
357 # backup the previous state
354 entry = self._map[f]
358 entry = self._map[f]
355 if entry[0] == 'm': # merge
359 if entry[0] == 'm': # merge
356 size = -1
360 size = -1
357 elif entry[0] == 'n' and entry[2] == -2: # other parent
361 elif entry[0] == 'n' and entry[2] == -2: # other parent
358 size = -2
362 size = -2
359 self._map[f] = ('r', 0, size, 0)
363 self._map[f] = ('r', 0, size, 0)
360 if size == 0 and f in self._copymap:
364 if size == 0 and f in self._copymap:
361 del self._copymap[f]
365 del self._copymap[f]
362
366
363 def merge(self, f):
367 def merge(self, f):
364 '''Mark a file merged.'''
368 '''Mark a file merged.'''
365 self._dirty = True
369 self._dirty = True
366 s = os.lstat(self._join(f))
370 s = os.lstat(self._join(f))
367 self._addpath(f)
371 self._addpath(f)
368 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
372 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
369 if f in self._copymap:
373 if f in self._copymap:
370 del self._copymap[f]
374 del self._copymap[f]
371
375
372 def drop(self, f):
376 def drop(self, f):
373 '''Drop a file from the dirstate'''
377 '''Drop a file from the dirstate'''
374 if f in self._map:
378 if f in self._map:
375 self._dirty = True
379 self._dirty = True
376 self._droppath(f)
380 self._droppath(f)
377 del self._map[f]
381 del self._map[f]
378
382
379 def _normalize(self, path, isknown):
383 def _normalize(self, path, isknown):
380 normed = util.normcase(path)
384 normed = util.normcase(path)
381 folded = self._foldmap.get(normed, None)
385 folded = self._foldmap.get(normed, None)
382 if folded is None:
386 if folded is None:
383 if isknown or not os.path.lexists(os.path.join(self._root, path)):
387 if isknown or not os.path.lexists(os.path.join(self._root, path)):
384 folded = path
388 folded = path
385 else:
389 else:
386 folded = self._foldmap.setdefault(normed,
390 folded = self._foldmap.setdefault(normed,
387 util.fspath(path, self._root))
391 util.fspath(normed, self._normroot))
388 return folded
392 return folded
389
393
390 def normalize(self, path, isknown=False):
394 def normalize(self, path, isknown=False):
391 '''
395 '''
392 normalize the case of a pathname when on a casefolding filesystem
396 normalize the case of a pathname when on a casefolding filesystem
393
397
394 isknown specifies whether the filename came from walking the
398 isknown specifies whether the filename came from walking the
395 disk, to avoid extra filesystem access
399 disk, to avoid extra filesystem access
396
400
397 The normalized case is determined based on the following precedence:
401 The normalized case is determined based on the following precedence:
398
402
399 - version of name already stored in the dirstate
403 - version of name already stored in the dirstate
400 - version of name stored on disk
404 - version of name stored on disk
401 - version provided via command arguments
405 - version provided via command arguments
402 '''
406 '''
403
407
404 if self._checkcase:
408 if self._checkcase:
405 return self._normalize(path, isknown)
409 return self._normalize(path, isknown)
406 return path
410 return path
407
411
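
A hedged illustration of the precedence described in the docstring above; ds and the filenames are hypothetical, and the results assume a case-insensitive filesystem:

    # ds = repo.dirstate                      # hypothetical handle
    # ds.normalize('readme.txt')              # -> 'README.txt' if tracked under that spelling
    # ds.normalize('NEW.TXT')                 # untracked: falls back to the on-disk spelling,
    #                                         #   or the argument itself if nothing exists
    # ds.normalize('new.txt', isknown=True)   # name came from a disk walk: no extra stat
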
408 def clear(self):
412 def clear(self):
409 self._map = {}
413 self._map = {}
410 if "_dirs" in self.__dict__:
414 if "_dirs" in self.__dict__:
411 delattr(self, "_dirs")
415 delattr(self, "_dirs")
412 self._copymap = {}
416 self._copymap = {}
413 self._pl = [nullid, nullid]
417 self._pl = [nullid, nullid]
414 self._lastnormaltime = None
418 self._lastnormaltime = None
415 self._dirty = True
419 self._dirty = True
416
420
417 def rebuild(self, parent, files):
421 def rebuild(self, parent, files):
418 self.clear()
422 self.clear()
419 for f in files:
423 for f in files:
420 if 'x' in files.flags(f):
424 if 'x' in files.flags(f):
421 self._map[f] = ('n', 0777, -1, 0)
425 self._map[f] = ('n', 0777, -1, 0)
422 else:
426 else:
423 self._map[f] = ('n', 0666, -1, 0)
427 self._map[f] = ('n', 0666, -1, 0)
424 self._pl = (parent, nullid)
428 self._pl = (parent, nullid)
425 self._dirty = True
429 self._dirty = True
426
430
427 def write(self):
431 def write(self):
428 if not self._dirty:
432 if not self._dirty:
429 return
433 return
430 st = self._opener("dirstate", "w", atomictemp=True)
434 st = self._opener("dirstate", "w", atomictemp=True)
431
435
432 # use the modification time of the newly created temporary file as the
436 # use the modification time of the newly created temporary file as the
433 # filesystem's notion of 'now'
437 # filesystem's notion of 'now'
434 now = int(util.fstat(st).st_mtime)
438 now = int(util.fstat(st).st_mtime)
435
439
436 cs = cStringIO.StringIO()
440 cs = cStringIO.StringIO()
437 copymap = self._copymap
441 copymap = self._copymap
438 pack = struct.pack
442 pack = struct.pack
439 write = cs.write
443 write = cs.write
440 write("".join(self._pl))
444 write("".join(self._pl))
441 for f, e in self._map.iteritems():
445 for f, e in self._map.iteritems():
442 if e[0] == 'n' and e[3] == now:
446 if e[0] == 'n' and e[3] == now:
443 # The file was last modified "simultaneously" with the current
447 # The file was last modified "simultaneously" with the current
444 # write to dirstate (i.e. within the same second for file-
448 # write to dirstate (i.e. within the same second for file-
445 # systems with a granularity of 1 sec). This commonly happens
449 # systems with a granularity of 1 sec). This commonly happens
446 # for at least a couple of files on 'update'.
450 # for at least a couple of files on 'update'.
447 # The user could change the file without changing its size
451 # The user could change the file without changing its size
448 # within the same second. Invalidate the file's stat data in
452 # within the same second. Invalidate the file's stat data in
449 # dirstate, forcing future 'status' calls to compare the
453 # dirstate, forcing future 'status' calls to compare the
450 # contents of the file. This prevents mistakenly treating such
454 # contents of the file. This prevents mistakenly treating such
451 # files as clean.
455 # files as clean.
452 e = (e[0], 0, -1, -1) # mark entry as 'unset'
456 e = (e[0], 0, -1, -1) # mark entry as 'unset'
453 self._map[f] = e
457 self._map[f] = e
454
458
455 if f in copymap:
459 if f in copymap:
456 f = "%s\0%s" % (f, copymap[f])
460 f = "%s\0%s" % (f, copymap[f])
457 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
461 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
458 write(e)
462 write(e)
459 write(f)
463 write(f)
460 st.write(cs.getvalue())
464 st.write(cs.getvalue())
461 st.close()
465 st.close()
462 self._lastnormaltime = None
466 self._lastnormaltime = None
463 self._dirty = self._dirtypl = False
467 self._dirty = self._dirtypl = False
464
468
465 def _dirignore(self, f):
469 def _dirignore(self, f):
466 if f == '.':
470 if f == '.':
467 return False
471 return False
468 if self._ignore(f):
472 if self._ignore(f):
469 return True
473 return True
470 for p in _finddirs(f):
474 for p in _finddirs(f):
471 if self._ignore(p):
475 if self._ignore(p):
472 return True
476 return True
473 return False
477 return False
474
478
475 def walk(self, match, subrepos, unknown, ignored):
479 def walk(self, match, subrepos, unknown, ignored):
476 '''
480 '''
477 Walk recursively through the directory tree, finding all files
481 Walk recursively through the directory tree, finding all files
478 matched by match.
482 matched by match.
479
483
480 Return a dict mapping filename to stat-like object (either
484 Return a dict mapping filename to stat-like object (either
481 mercurial.osutil.stat instance or return value of os.stat()).
485 mercurial.osutil.stat instance or return value of os.stat()).
482 '''
486 '''
483
487
484 def fwarn(f, msg):
488 def fwarn(f, msg):
485 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
489 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
486 return False
490 return False
487
491
488 def badtype(mode):
492 def badtype(mode):
489 kind = _('unknown')
493 kind = _('unknown')
490 if stat.S_ISCHR(mode):
494 if stat.S_ISCHR(mode):
491 kind = _('character device')
495 kind = _('character device')
492 elif stat.S_ISBLK(mode):
496 elif stat.S_ISBLK(mode):
493 kind = _('block device')
497 kind = _('block device')
494 elif stat.S_ISFIFO(mode):
498 elif stat.S_ISFIFO(mode):
495 kind = _('fifo')
499 kind = _('fifo')
496 elif stat.S_ISSOCK(mode):
500 elif stat.S_ISSOCK(mode):
497 kind = _('socket')
501 kind = _('socket')
498 elif stat.S_ISDIR(mode):
502 elif stat.S_ISDIR(mode):
499 kind = _('directory')
503 kind = _('directory')
500 return _('unsupported file type (type is %s)') % kind
504 return _('unsupported file type (type is %s)') % kind
501
505
502 ignore = self._ignore
506 ignore = self._ignore
503 dirignore = self._dirignore
507 dirignore = self._dirignore
504 if ignored:
508 if ignored:
505 ignore = util.never
509 ignore = util.never
506 dirignore = util.never
510 dirignore = util.never
507 elif not unknown:
511 elif not unknown:
508 # if unknown and ignored are False, skip step 2
512 # if unknown and ignored are False, skip step 2
509 ignore = util.always
513 ignore = util.always
510 dirignore = util.always
514 dirignore = util.always
511
515
512 matchfn = match.matchfn
516 matchfn = match.matchfn
513 badfn = match.bad
517 badfn = match.bad
514 dmap = self._map
518 dmap = self._map
515 normpath = util.normpath
519 normpath = util.normpath
516 listdir = osutil.listdir
520 listdir = osutil.listdir
517 lstat = os.lstat
521 lstat = os.lstat
518 getkind = stat.S_IFMT
522 getkind = stat.S_IFMT
519 dirkind = stat.S_IFDIR
523 dirkind = stat.S_IFDIR
520 regkind = stat.S_IFREG
524 regkind = stat.S_IFREG
521 lnkkind = stat.S_IFLNK
525 lnkkind = stat.S_IFLNK
522 join = self._join
526 join = self._join
523 work = []
527 work = []
524 wadd = work.append
528 wadd = work.append
525
529
526 exact = skipstep3 = False
530 exact = skipstep3 = False
527 if matchfn == match.exact: # match.exact
531 if matchfn == match.exact: # match.exact
528 exact = True
532 exact = True
529 dirignore = util.always # skip step 2
533 dirignore = util.always # skip step 2
530 elif match.files() and not match.anypats(): # match.match, no patterns
534 elif match.files() and not match.anypats(): # match.match, no patterns
531 skipstep3 = True
535 skipstep3 = True
532
536
533 if self._checkcase:
537 if self._checkcase:
534 normalize = self._normalize
538 normalize = self._normalize
535 skipstep3 = False
539 skipstep3 = False
536 else:
540 else:
537 normalize = lambda x, y: x
541 normalize = lambda x, y: x
538
542
539 files = sorted(match.files())
543 files = sorted(match.files())
540 subrepos.sort()
544 subrepos.sort()
541 i, j = 0, 0
545 i, j = 0, 0
542 while i < len(files) and j < len(subrepos):
546 while i < len(files) and j < len(subrepos):
543 subpath = subrepos[j] + "/"
547 subpath = subrepos[j] + "/"
544 if files[i] < subpath:
548 if files[i] < subpath:
545 i += 1
549 i += 1
546 continue
550 continue
547 while i < len(files) and files[i].startswith(subpath):
551 while i < len(files) and files[i].startswith(subpath):
548 del files[i]
552 del files[i]
549 j += 1
553 j += 1
550
554
551 if not files or '.' in files:
555 if not files or '.' in files:
552 files = ['']
556 files = ['']
553 results = dict.fromkeys(subrepos)
557 results = dict.fromkeys(subrepos)
554 results['.hg'] = None
558 results['.hg'] = None
555
559
556 # step 1: find all explicit files
560 # step 1: find all explicit files
557 for ff in files:
561 for ff in files:
558 nf = normalize(normpath(ff), False)
562 nf = normalize(normpath(ff), False)
559 if nf in results:
563 if nf in results:
560 continue
564 continue
561
565
562 try:
566 try:
563 st = lstat(join(nf))
567 st = lstat(join(nf))
564 kind = getkind(st.st_mode)
568 kind = getkind(st.st_mode)
565 if kind == dirkind:
569 if kind == dirkind:
566 skipstep3 = False
570 skipstep3 = False
567 if nf in dmap:
571 if nf in dmap:
568 #file deleted on disk but still in dirstate
572 #file deleted on disk but still in dirstate
569 results[nf] = None
573 results[nf] = None
570 match.dir(nf)
574 match.dir(nf)
571 if not dirignore(nf):
575 if not dirignore(nf):
572 wadd(nf)
576 wadd(nf)
573 elif kind == regkind or kind == lnkkind:
577 elif kind == regkind or kind == lnkkind:
574 results[nf] = st
578 results[nf] = st
575 else:
579 else:
576 badfn(ff, badtype(kind))
580 badfn(ff, badtype(kind))
577 if nf in dmap:
581 if nf in dmap:
578 results[nf] = None
582 results[nf] = None
579 except OSError, inst:
583 except OSError, inst:
580 if nf in dmap: # does it exactly match a file?
584 if nf in dmap: # does it exactly match a file?
581 results[nf] = None
585 results[nf] = None
582 else: # does it match a directory?
586 else: # does it match a directory?
583 prefix = nf + "/"
587 prefix = nf + "/"
584 for fn in dmap:
588 for fn in dmap:
585 if fn.startswith(prefix):
589 if fn.startswith(prefix):
586 match.dir(nf)
590 match.dir(nf)
587 skipstep3 = False
591 skipstep3 = False
588 break
592 break
589 else:
593 else:
590 badfn(ff, inst.strerror)
594 badfn(ff, inst.strerror)
591
595
592 # step 2: visit subdirectories
596 # step 2: visit subdirectories
593 while work:
597 while work:
594 nd = work.pop()
598 nd = work.pop()
595 skip = None
599 skip = None
596 if nd == '.':
600 if nd == '.':
597 nd = ''
601 nd = ''
598 else:
602 else:
599 skip = '.hg'
603 skip = '.hg'
600 try:
604 try:
601 entries = listdir(join(nd), stat=True, skip=skip)
605 entries = listdir(join(nd), stat=True, skip=skip)
602 except OSError, inst:
606 except OSError, inst:
603 if inst.errno == errno.EACCES:
607 if inst.errno == errno.EACCES:
604 fwarn(nd, inst.strerror)
608 fwarn(nd, inst.strerror)
605 continue
609 continue
606 raise
610 raise
607 for f, kind, st in entries:
611 for f, kind, st in entries:
608 nf = normalize(nd and (nd + "/" + f) or f, True)
612 nf = normalize(nd and (nd + "/" + f) or f, True)
609 if nf not in results:
613 if nf not in results:
610 if kind == dirkind:
614 if kind == dirkind:
611 if not ignore(nf):
615 if not ignore(nf):
612 match.dir(nf)
616 match.dir(nf)
613 wadd(nf)
617 wadd(nf)
614 if nf in dmap and matchfn(nf):
618 if nf in dmap and matchfn(nf):
615 results[nf] = None
619 results[nf] = None
616 elif kind == regkind or kind == lnkkind:
620 elif kind == regkind or kind == lnkkind:
617 if nf in dmap:
621 if nf in dmap:
618 if matchfn(nf):
622 if matchfn(nf):
619 results[nf] = st
623 results[nf] = st
620 elif matchfn(nf) and not ignore(nf):
624 elif matchfn(nf) and not ignore(nf):
621 results[nf] = st
625 results[nf] = st
622 elif nf in dmap and matchfn(nf):
626 elif nf in dmap and matchfn(nf):
623 results[nf] = None
627 results[nf] = None
624
628
625 # step 3: report unseen items in the dmap hash
629 # step 3: report unseen items in the dmap hash
626 if not skipstep3 and not exact:
630 if not skipstep3 and not exact:
627 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
631 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
628 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
632 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
629 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
633 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
630 st = None
634 st = None
631 results[nf] = st
635 results[nf] = st
632 for s in subrepos:
636 for s in subrepos:
633 del results[s]
637 del results[s]
634 del results['.hg']
638 del results['.hg']
635 return results
639 return results
636
640
637 def status(self, match, subrepos, ignored, clean, unknown):
641 def status(self, match, subrepos, ignored, clean, unknown):
638 '''Determine the status of the working copy relative to the
642 '''Determine the status of the working copy relative to the
639 dirstate and return a tuple of lists (unsure, modified, added,
643 dirstate and return a tuple of lists (unsure, modified, added,
640 removed, deleted, unknown, ignored, clean), where:
644 removed, deleted, unknown, ignored, clean), where:
641
645
642 unsure:
646 unsure:
643 files that might have been modified since the dirstate was
647 files that might have been modified since the dirstate was
644 written, but need to be read to be sure (size is the same
648 written, but need to be read to be sure (size is the same
645 but mtime differs)
649 but mtime differs)
646 modified:
650 modified:
647 files that have definitely been modified since the dirstate
651 files that have definitely been modified since the dirstate
648 was written (different size or mode)
652 was written (different size or mode)
649 added:
653 added:
650 files that have been explicitly added with hg add
654 files that have been explicitly added with hg add
651 removed:
655 removed:
652 files that have been explicitly removed with hg remove
656 files that have been explicitly removed with hg remove
653 deleted:
657 deleted:
654 files that have been deleted through other means ("missing")
658 files that have been deleted through other means ("missing")
655 unknown:
659 unknown:
656 files not in the dirstate that are not ignored
660 files not in the dirstate that are not ignored
657 ignored:
661 ignored:
658 files not in the dirstate that are ignored
662 files not in the dirstate that are ignored
659 (by _dirignore())
663 (by _dirignore())
660 clean:
664 clean:
661 files that have definitely not been modified since the
665 files that have definitely not been modified since the
662 dirstate was written
666 dirstate was written
663 '''
667 '''
664 listignored, listclean, listunknown = ignored, clean, unknown
668 listignored, listclean, listunknown = ignored, clean, unknown
665 lookup, modified, added, unknown, ignored = [], [], [], [], []
669 lookup, modified, added, unknown, ignored = [], [], [], [], []
666 removed, deleted, clean = [], [], []
670 removed, deleted, clean = [], [], []
667
671
668 dmap = self._map
672 dmap = self._map
669 ladd = lookup.append # aka "unsure"
673 ladd = lookup.append # aka "unsure"
670 madd = modified.append
674 madd = modified.append
671 aadd = added.append
675 aadd = added.append
672 uadd = unknown.append
676 uadd = unknown.append
673 iadd = ignored.append
677 iadd = ignored.append
674 radd = removed.append
678 radd = removed.append
675 dadd = deleted.append
679 dadd = deleted.append
676 cadd = clean.append
680 cadd = clean.append
677
681
678 lnkkind = stat.S_IFLNK
682 lnkkind = stat.S_IFLNK
679
683
680 for fn, st in self.walk(match, subrepos, listunknown,
684 for fn, st in self.walk(match, subrepos, listunknown,
681 listignored).iteritems():
685 listignored).iteritems():
682 if fn not in dmap:
686 if fn not in dmap:
683 if (listignored or match.exact(fn)) and self._dirignore(fn):
687 if (listignored or match.exact(fn)) and self._dirignore(fn):
684 if listignored:
688 if listignored:
685 iadd(fn)
689 iadd(fn)
686 elif listunknown:
690 elif listunknown:
687 uadd(fn)
691 uadd(fn)
688 continue
692 continue
689
693
690 state, mode, size, time = dmap[fn]
694 state, mode, size, time = dmap[fn]
691
695
692 if not st and state in "nma":
696 if not st and state in "nma":
693 dadd(fn)
697 dadd(fn)
694 elif state == 'n':
698 elif state == 'n':
695 # The "mode & lnkkind != lnkkind or self._checklink"
699 # The "mode & lnkkind != lnkkind or self._checklink"
696 # lines are an expansion of "islink => checklink"
700 # lines are an expansion of "islink => checklink"
697 # where islink means "is this a link?" and checklink
701 # where islink means "is this a link?" and checklink
698 # means "can we check links?".
702 # means "can we check links?".
699 mtime = int(st.st_mtime)
703 mtime = int(st.st_mtime)
700 if (size >= 0 and
704 if (size >= 0 and
701 (size != st.st_size
705 (size != st.st_size
702 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
706 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
703 and (mode & lnkkind != lnkkind or self._checklink)
707 and (mode & lnkkind != lnkkind or self._checklink)
704 or size == -2 # other parent
708 or size == -2 # other parent
705 or fn in self._copymap):
709 or fn in self._copymap):
706 madd(fn)
710 madd(fn)
707 elif (mtime != time
711 elif (mtime != time
708 and (mode & lnkkind != lnkkind or self._checklink)):
712 and (mode & lnkkind != lnkkind or self._checklink)):
709 ladd(fn)
713 ladd(fn)
710 elif mtime == self._lastnormaltime:
714 elif mtime == self._lastnormaltime:
711 # fn may have been changed in the same timeslot without
715 # fn may have been changed in the same timeslot without
712 # changing its size. This can happen if we quickly do
716 # changing its size. This can happen if we quickly do
713 # multiple commits in a single transaction.
717 # multiple commits in a single transaction.
714 # Force lookup, so we don't miss such a racy file change.
718 # Force lookup, so we don't miss such a racy file change.
715 ladd(fn)
719 ladd(fn)
716 elif listclean:
720 elif listclean:
717 cadd(fn)
721 cadd(fn)
718 elif state == 'm':
722 elif state == 'm':
719 madd(fn)
723 madd(fn)
720 elif state == 'a':
724 elif state == 'a':
721 aadd(fn)
725 aadd(fn)
722 elif state == 'r':
726 elif state == 'r':
723 radd(fn)
727 radd(fn)
724
728
725 return (lookup, modified, added, removed, deleted, unknown, ignored,
729 return (lookup, modified, added, removed, deleted, unknown, ignored,
726 clean)
730 clean)
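
As the status() docstring above spells out, callers receive eight lists in a fixed order. A hedged sketch of unpacking them (match and the flag values are placeholders):

    # unsure, modified, added, removed, deleted, unknown, ignored, clean = \
    #     dirstate.status(match, [], ignored=False, clean=False, unknown=True)
    # 'unsure' entries share size with the dirstate record but differ in mtime,
    # so their contents still have to be compared before they can be reported.
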
@@ -1,1750 +1,1750 b''
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, datetime, calendar, textwrap, signal
19 import os, time, datetime, calendar, textwrap, signal
20 import imp, socket, urllib
20 import imp, socket, urllib
21
21
22 if os.name == 'nt':
22 if os.name == 'nt':
23 import windows as platform
23 import windows as platform
24 else:
24 else:
25 import posix as platform
25 import posix as platform
26
26
27 cachestat = platform.cachestat
27 cachestat = platform.cachestat
28 checkexec = platform.checkexec
28 checkexec = platform.checkexec
29 checklink = platform.checklink
29 checklink = platform.checklink
30 copymode = platform.copymode
30 copymode = platform.copymode
31 executablepath = platform.executablepath
31 executablepath = platform.executablepath
32 expandglobs = platform.expandglobs
32 expandglobs = platform.expandglobs
33 explainexit = platform.explainexit
33 explainexit = platform.explainexit
34 findexe = platform.findexe
34 findexe = platform.findexe
35 gethgcmd = platform.gethgcmd
35 gethgcmd = platform.gethgcmd
36 getuser = platform.getuser
36 getuser = platform.getuser
37 groupmembers = platform.groupmembers
37 groupmembers = platform.groupmembers
38 groupname = platform.groupname
38 groupname = platform.groupname
39 hidewindow = platform.hidewindow
39 hidewindow = platform.hidewindow
40 isexec = platform.isexec
40 isexec = platform.isexec
41 isowner = platform.isowner
41 isowner = platform.isowner
42 localpath = platform.localpath
42 localpath = platform.localpath
43 lookupreg = platform.lookupreg
43 lookupreg = platform.lookupreg
44 makedir = platform.makedir
44 makedir = platform.makedir
45 nlinks = platform.nlinks
45 nlinks = platform.nlinks
46 normpath = platform.normpath
46 normpath = platform.normpath
47 normcase = platform.normcase
47 normcase = platform.normcase
48 nulldev = platform.nulldev
48 nulldev = platform.nulldev
49 openhardlinks = platform.openhardlinks
49 openhardlinks = platform.openhardlinks
50 oslink = platform.oslink
50 oslink = platform.oslink
51 parsepatchoutput = platform.parsepatchoutput
51 parsepatchoutput = platform.parsepatchoutput
52 pconvert = platform.pconvert
52 pconvert = platform.pconvert
53 popen = platform.popen
53 popen = platform.popen
54 posixfile = platform.posixfile
54 posixfile = platform.posixfile
55 quotecommand = platform.quotecommand
55 quotecommand = platform.quotecommand
56 realpath = platform.realpath
56 realpath = platform.realpath
57 rename = platform.rename
57 rename = platform.rename
58 samedevice = platform.samedevice
58 samedevice = platform.samedevice
59 samefile = platform.samefile
59 samefile = platform.samefile
60 samestat = platform.samestat
60 samestat = platform.samestat
61 setbinary = platform.setbinary
61 setbinary = platform.setbinary
62 setflags = platform.setflags
62 setflags = platform.setflags
63 setsignalhandler = platform.setsignalhandler
63 setsignalhandler = platform.setsignalhandler
64 shellquote = platform.shellquote
64 shellquote = platform.shellquote
65 spawndetached = platform.spawndetached
65 spawndetached = platform.spawndetached
66 sshargs = platform.sshargs
66 sshargs = platform.sshargs
67 statfiles = platform.statfiles
67 statfiles = platform.statfiles
68 termwidth = platform.termwidth
68 termwidth = platform.termwidth
69 testpid = platform.testpid
69 testpid = platform.testpid
70 umask = platform.umask
70 umask = platform.umask
71 unlink = platform.unlink
71 unlink = platform.unlink
72 unlinkpath = platform.unlinkpath
72 unlinkpath = platform.unlinkpath
73 username = platform.username
73 username = platform.username
74
74
75 # Python compatibility
75 # Python compatibility
76
76
77 def sha1(s=''):
77 def sha1(s=''):
78 '''
78 '''
79 Low-overhead wrapper around Python's SHA support
79 Low-overhead wrapper around Python's SHA support
80
80
81 >>> f = _fastsha1
81 >>> f = _fastsha1
82 >>> a = sha1()
82 >>> a = sha1()
83 >>> a = f()
83 >>> a = f()
84 >>> a.hexdigest()
84 >>> a.hexdigest()
85 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
85 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
86 '''
86 '''
87
87
88 return _fastsha1(s)
88 return _fastsha1(s)
89
89
90 _notset = object()
90 _notset = object()
91 def safehasattr(thing, attr):
91 def safehasattr(thing, attr):
92 return getattr(thing, attr, _notset) is not _notset
92 return getattr(thing, attr, _notset) is not _notset
93
93
94 def _fastsha1(s=''):
94 def _fastsha1(s=''):
95 # This function will import sha1 from hashlib or sha (whichever is
95 # This function will import sha1 from hashlib or sha (whichever is
96 # available) and overwrite itself with it on the first call.
96 # available) and overwrite itself with it on the first call.
97 # Subsequent calls will go directly to the imported function.
97 # Subsequent calls will go directly to the imported function.
98 if sys.version_info >= (2, 5):
98 if sys.version_info >= (2, 5):
99 from hashlib import sha1 as _sha1
99 from hashlib import sha1 as _sha1
100 else:
100 else:
101 from sha import sha as _sha1
101 from sha import sha as _sha1
102 global _fastsha1, sha1
102 global _fastsha1, sha1
103 _fastsha1 = sha1 = _sha1
103 _fastsha1 = sha1 = _sha1
104 return _sha1(s)
104 return _sha1(s)
105
105
106 import __builtin__
106 import __builtin__
107
107
108 if sys.version_info[0] < 3:
108 if sys.version_info[0] < 3:
109 def fakebuffer(sliceable, offset=0):
109 def fakebuffer(sliceable, offset=0):
110 return sliceable[offset:]
110 return sliceable[offset:]
111 else:
111 else:
112 def fakebuffer(sliceable, offset=0):
112 def fakebuffer(sliceable, offset=0):
113 return memoryview(sliceable)[offset:]
113 return memoryview(sliceable)[offset:]
114 try:
114 try:
115 buffer
115 buffer
116 except NameError:
116 except NameError:
117 __builtin__.buffer = fakebuffer
117 __builtin__.buffer = fakebuffer
118
118
119 import subprocess
119 import subprocess
120 closefds = os.name == 'posix'
120 closefds = os.name == 'posix'
121
121
122 def popen2(cmd, env=None, newlines=False):
122 def popen2(cmd, env=None, newlines=False):
123 # Setting bufsize to -1 lets the system decide the buffer size.
123 # Setting bufsize to -1 lets the system decide the buffer size.
124 # The default for bufsize is 0, meaning unbuffered. This leads to
124 # The default for bufsize is 0, meaning unbuffered. This leads to
125 # poor performance on Mac OS X: http://bugs.python.org/issue4194
125 # poor performance on Mac OS X: http://bugs.python.org/issue4194
126 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
126 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
127 close_fds=closefds,
127 close_fds=closefds,
128 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
128 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
129 universal_newlines=newlines,
129 universal_newlines=newlines,
130 env=env)
130 env=env)
131 return p.stdin, p.stdout
131 return p.stdin, p.stdout
132
132
133 def popen3(cmd, env=None, newlines=False):
133 def popen3(cmd, env=None, newlines=False):
134 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
134 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
135 close_fds=closefds,
135 close_fds=closefds,
136 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
136 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
137 stderr=subprocess.PIPE,
137 stderr=subprocess.PIPE,
138 universal_newlines=newlines,
138 universal_newlines=newlines,
139 env=env)
139 env=env)
140 return p.stdin, p.stdout, p.stderr
140 return p.stdin, p.stdout, p.stderr
141
141
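
A small, hedged usage sketch for the pipe helpers above (assumes a POSIX shell with tr(1) available, and popen2 in scope):

    fin, fout = popen2('tr a-z A-Z')   # run the command through the shell
    fin.write('hello\n')
    fin.close()                        # signal EOF so the filter can finish
    fout.read()                        # 'HELLO\n'
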
142 def version():
142 def version():
143 """Return version information if available."""
143 """Return version information if available."""
144 try:
144 try:
145 import __version__
145 import __version__
146 return __version__.version
146 return __version__.version
147 except ImportError:
147 except ImportError:
148 return 'unknown'
148 return 'unknown'
149
149
150 # used by parsedate
150 # used by parsedate
151 defaultdateformats = (
151 defaultdateformats = (
152 '%Y-%m-%d %H:%M:%S',
152 '%Y-%m-%d %H:%M:%S',
153 '%Y-%m-%d %I:%M:%S%p',
153 '%Y-%m-%d %I:%M:%S%p',
154 '%Y-%m-%d %H:%M',
154 '%Y-%m-%d %H:%M',
155 '%Y-%m-%d %I:%M%p',
155 '%Y-%m-%d %I:%M%p',
156 '%Y-%m-%d',
156 '%Y-%m-%d',
157 '%m-%d',
157 '%m-%d',
158 '%m/%d',
158 '%m/%d',
159 '%m/%d/%y',
159 '%m/%d/%y',
160 '%m/%d/%Y',
160 '%m/%d/%Y',
161 '%a %b %d %H:%M:%S %Y',
161 '%a %b %d %H:%M:%S %Y',
162 '%a %b %d %I:%M:%S%p %Y',
162 '%a %b %d %I:%M:%S%p %Y',
163 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
163 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
164 '%b %d %H:%M:%S %Y',
164 '%b %d %H:%M:%S %Y',
165 '%b %d %I:%M:%S%p %Y',
165 '%b %d %I:%M:%S%p %Y',
166 '%b %d %H:%M:%S',
166 '%b %d %H:%M:%S',
167 '%b %d %I:%M:%S%p',
167 '%b %d %I:%M:%S%p',
168 '%b %d %H:%M',
168 '%b %d %H:%M',
169 '%b %d %I:%M%p',
169 '%b %d %I:%M%p',
170 '%b %d %Y',
170 '%b %d %Y',
171 '%b %d',
171 '%b %d',
172 '%H:%M:%S',
172 '%H:%M:%S',
173 '%I:%M:%S%p',
173 '%I:%M:%S%p',
174 '%H:%M',
174 '%H:%M',
175 '%I:%M%p',
175 '%I:%M%p',
176 )
176 )
177
177
178 extendeddateformats = defaultdateformats + (
178 extendeddateformats = defaultdateformats + (
179 "%Y",
179 "%Y",
180 "%Y-%m",
180 "%Y-%m",
181 "%b",
181 "%b",
182 "%b %Y",
182 "%b %Y",
183 )
183 )
184
184
185 def cachefunc(func):
185 def cachefunc(func):
186 '''cache the result of function calls'''
186 '''cache the result of function calls'''
187 # XXX doesn't handle keyword args
187 # XXX doesn't handle keyword args
188 cache = {}
188 cache = {}
189 if func.func_code.co_argcount == 1:
189 if func.func_code.co_argcount == 1:
190 # we gain a small amount of time because
190 # we gain a small amount of time because
191 # we don't need to pack/unpack the list
191 # we don't need to pack/unpack the list
192 def f(arg):
192 def f(arg):
193 if arg not in cache:
193 if arg not in cache:
194 cache[arg] = func(arg)
194 cache[arg] = func(arg)
195 return cache[arg]
195 return cache[arg]
196 else:
196 else:
197 def f(*args):
197 def f(*args):
198 if args not in cache:
198 if args not in cache:
199 cache[args] = func(*args)
199 cache[args] = func(*args)
200 return cache[args]
200 return cache[args]
201
201
202 return f
202 return f
203
203
204 def lrucachefunc(func):
204 def lrucachefunc(func):
205 '''cache most recent results of function calls'''
205 '''cache most recent results of function calls'''
206 cache = {}
206 cache = {}
207 order = []
207 order = []
208 if func.func_code.co_argcount == 1:
208 if func.func_code.co_argcount == 1:
209 def f(arg):
209 def f(arg):
210 if arg not in cache:
210 if arg not in cache:
211 if len(cache) > 20:
211 if len(cache) > 20:
212 del cache[order.pop(0)]
212 del cache[order.pop(0)]
213 cache[arg] = func(arg)
213 cache[arg] = func(arg)
214 else:
214 else:
215 order.remove(arg)
215 order.remove(arg)
216 order.append(arg)
216 order.append(arg)
217 return cache[arg]
217 return cache[arg]
218 else:
218 else:
219 def f(*args):
219 def f(*args):
220 if args not in cache:
220 if args not in cache:
221 if len(cache) > 20:
221 if len(cache) > 20:
222 del cache[order.pop(0)]
222 del cache[order.pop(0)]
223 cache[args] = func(*args)
223 cache[args] = func(*args)
224 else:
224 else:
225 order.remove(args)
225 order.remove(args)
226 order.append(args)
226 order.append(args)
227 return cache[args]
227 return cache[args]
228
228
229 return f
229 return f
230
230
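
A hedged sketch of the two memoizers above: cachefunc() caches every distinct argument tuple indefinitely, while lrucachefunc() evicts the oldest entry once it holds more than 20 results.

    def square(n):
        return n * n            # stand-in for an expensive call

    fast = cachefunc(square)
    fast(4)                     # computes and stores 16
    fast(4)                     # served straight from the cache

    recent = lrucachefunc(square)
    for i in range(30):
        recent(i)               # only the most recent ~20 results survive
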
231 class propertycache(object):
231 class propertycache(object):
232 def __init__(self, func):
232 def __init__(self, func):
233 self.func = func
233 self.func = func
234 self.name = func.__name__
234 self.name = func.__name__
235 def __get__(self, obj, type=None):
235 def __get__(self, obj, type=None):
236 result = self.func(obj)
236 result = self.func(obj)
237 setattr(obj, self.name, result)
237 setattr(obj, self.name, result)
238 return result
238 return result
239
239
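
propertycache, used throughout the dirstate code in the first hunk, is a non-data descriptor: the wrapped method runs once per instance, and its result then shadows the descriptor as a plain attribute. A minimal sketch:

    class Example(object):
        @propertycache
        def answer(self):
            return 42                # executed only on first access

    e = Example()
    e.answer                         # calls __get__, stores 42 on the instance
    'answer' in e.__dict__           # True: later reads bypass the descriptor
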
240 def pipefilter(s, cmd):
240 def pipefilter(s, cmd):
241 '''filter string S through command CMD, returning its output'''
241 '''filter string S through command CMD, returning its output'''
242 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
242 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
243 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
243 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
244 pout, perr = p.communicate(s)
244 pout, perr = p.communicate(s)
245 return pout
245 return pout
246
246
247 def tempfilter(s, cmd):
247 def tempfilter(s, cmd):
248 '''filter string S through a pair of temporary files with CMD.
248 '''filter string S through a pair of temporary files with CMD.
249 CMD is used as a template to create the real command to be run,
249 CMD is used as a template to create the real command to be run,
250 with the strings INFILE and OUTFILE replaced by the real names of
250 with the strings INFILE and OUTFILE replaced by the real names of
251 the temporary files generated.'''
251 the temporary files generated.'''
252 inname, outname = None, None
252 inname, outname = None, None
253 try:
253 try:
254 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
254 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
255 fp = os.fdopen(infd, 'wb')
255 fp = os.fdopen(infd, 'wb')
256 fp.write(s)
256 fp.write(s)
257 fp.close()
257 fp.close()
258 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
258 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
259 os.close(outfd)
259 os.close(outfd)
260 cmd = cmd.replace('INFILE', inname)
260 cmd = cmd.replace('INFILE', inname)
261 cmd = cmd.replace('OUTFILE', outname)
261 cmd = cmd.replace('OUTFILE', outname)
262 code = os.system(cmd)
262 code = os.system(cmd)
263 if sys.platform == 'OpenVMS' and code & 1:
263 if sys.platform == 'OpenVMS' and code & 1:
264 code = 0
264 code = 0
265 if code:
265 if code:
266 raise Abort(_("command '%s' failed: %s") %
266 raise Abort(_("command '%s' failed: %s") %
267 (cmd, explainexit(code)))
267 (cmd, explainexit(code)))
268 fp = open(outname, 'rb')
268 fp = open(outname, 'rb')
269 r = fp.read()
269 r = fp.read()
270 fp.close()
270 fp.close()
271 return r
271 return r
272 finally:
272 finally:
273 try:
273 try:
274 if inname:
274 if inname:
275 os.unlink(inname)
275 os.unlink(inname)
276 except OSError:
276 except OSError:
277 pass
277 pass
278 try:
278 try:
279 if outname:
279 if outname:
280 os.unlink(outname)
280 os.unlink(outname)
281 except OSError:
281 except OSError:
282 pass
282 pass
283
283
284 filtertable = {
284 filtertable = {
285 'tempfile:': tempfilter,
285 'tempfile:': tempfilter,
286 'pipe:': pipefilter,
286 'pipe:': pipefilter,
287 }
287 }
288
288
289 def filter(s, cmd):
289 def filter(s, cmd):
290 "filter a string through a command that transforms its input to its output"
290 "filter a string through a command that transforms its input to its output"
291 for name, fn in filtertable.iteritems():
291 for name, fn in filtertable.iteritems():
292 if cmd.startswith(name):
292 if cmd.startswith(name):
293 return fn(s, cmd[len(name):].lstrip())
293 return fn(s, cmd[len(name):].lstrip())
294 return pipefilter(s, cmd)
294 return pipefilter(s, cmd)
295
295
296 def binary(s):
296 def binary(s):
297 """return true if a string is binary data"""
297 """return true if a string is binary data"""
298 return bool(s and '\0' in s)
298 return bool(s and '\0' in s)
299
299
300 def increasingchunks(source, min=1024, max=65536):
300 def increasingchunks(source, min=1024, max=65536):
301 '''return no less than min bytes per chunk while data remains,
301 '''return no less than min bytes per chunk while data remains,
302 doubling min after each chunk until it reaches max'''
302 doubling min after each chunk until it reaches max'''
303 def log2(x):
303 def log2(x):
304 if not x:
304 if not x:
305 return 0
305 return 0
306 i = 0
306 i = 0
307 while x:
307 while x:
308 x >>= 1
308 x >>= 1
309 i += 1
309 i += 1
310 return i - 1
310 return i - 1
311
311
312 buf = []
312 buf = []
313 blen = 0
313 blen = 0
314 for chunk in source:
314 for chunk in source:
315 buf.append(chunk)
315 buf.append(chunk)
316 blen += len(chunk)
316 blen += len(chunk)
317 if blen >= min:
317 if blen >= min:
318 if min < max:
318 if min < max:
319 min = min << 1
319 min = min << 1
320 nmin = 1 << log2(blen)
320 nmin = 1 << log2(blen)
321 if nmin > min:
321 if nmin > min:
322 min = nmin
322 min = nmin
323 if min > max:
323 if min > max:
324 min = max
324 min = max
325 yield ''.join(buf)
325 yield ''.join(buf)
326 blen = 0
326 blen = 0
327 buf = []
327 buf = []
328 if buf:
328 if buf:
329 yield ''.join(buf)
329 yield ''.join(buf)
330
330
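# Editor's note: illustrative sketch only, not part of upstream util.py.
# increasingchunks() rebatches an iterator of strings so the yielded
# pieces start around 1k and roughly double toward 64k, which keeps
# write() calls cheap for large streams. The helper name is hypothetical.
def _increasingchunksexample(source):
    # source is any iterable of byte strings
    sizes = [len(chunk) for chunk in increasingchunks(source)]
    # each yielded piece (except possibly the last) is at least the
    # current 'min', which doubles toward 65536
    return sizes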
331 Abort = error.Abort
331 Abort = error.Abort
332
332
333 def always(fn):
333 def always(fn):
334 return True
334 return True
335
335
336 def never(fn):
336 def never(fn):
337 return False
337 return False
338
338
339 def pathto(root, n1, n2):
339 def pathto(root, n1, n2):
340 '''return the relative path from one place to another.
340 '''return the relative path from one place to another.
341 root should use os.sep to separate directories
341 root should use os.sep to separate directories
342 n1 should use os.sep to separate directories
342 n1 should use os.sep to separate directories
343 n2 should use "/" to separate directories
343 n2 should use "/" to separate directories
344 returns an os.sep-separated path.
344 returns an os.sep-separated path.
345
345
346 If n1 is a relative path, it's assumed it's
346 If n1 is a relative path, it's assumed it's
347 relative to root.
347 relative to root.
348 n2 should always be relative to root.
348 n2 should always be relative to root.
349 '''
349 '''
350 if not n1:
350 if not n1:
351 return localpath(n2)
351 return localpath(n2)
352 if os.path.isabs(n1):
352 if os.path.isabs(n1):
353 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
353 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
354 return os.path.join(root, localpath(n2))
354 return os.path.join(root, localpath(n2))
355 n2 = '/'.join((pconvert(root), n2))
355 n2 = '/'.join((pconvert(root), n2))
356 a, b = splitpath(n1), n2.split('/')
356 a, b = splitpath(n1), n2.split('/')
357 a.reverse()
357 a.reverse()
358 b.reverse()
358 b.reverse()
359 while a and b and a[-1] == b[-1]:
359 while a and b and a[-1] == b[-1]:
360 a.pop()
360 a.pop()
361 b.pop()
361 b.pop()
362 b.reverse()
362 b.reverse()
363 return os.sep.join((['..'] * len(a)) + b) or '.'
363 return os.sep.join((['..'] * len(a)) + b) or '.'
364
364
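# Editor's note: illustrative sketch only, not part of upstream util.py.
# pathto() answers "how do I get from directory n1 to repo file n2?"
# using the platform separator for the result; the value shown assumes
# a POSIX os.sep.
def _pathtoexample():
    # from <root>/a/b to the repo-relative file c/d -> '../../c/d'
    return pathto('/repo', 'a/b', 'c/d')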
365 _hgexecutable = None
365 _hgexecutable = None
366
366
367 def mainfrozen():
367 def mainfrozen():
368 """return True if we are a frozen executable.
368 """return True if we are a frozen executable.
369
369
370 The code supports py2exe (most common, Windows only) and tools/freeze
370 The code supports py2exe (most common, Windows only) and tools/freeze
371 (portable, not much used).
371 (portable, not much used).
372 """
372 """
373 return (safehasattr(sys, "frozen") or # new py2exe
373 return (safehasattr(sys, "frozen") or # new py2exe
374 safehasattr(sys, "importers") or # old py2exe
374 safehasattr(sys, "importers") or # old py2exe
375 imp.is_frozen("__main__")) # tools/freeze
375 imp.is_frozen("__main__")) # tools/freeze
376
376
377 def hgexecutable():
377 def hgexecutable():
378 """return location of the 'hg' executable.
378 """return location of the 'hg' executable.
379
379
380 Defaults to $HG or 'hg' in the search path.
380 Defaults to $HG or 'hg' in the search path.
381 """
381 """
382 if _hgexecutable is None:
382 if _hgexecutable is None:
383 hg = os.environ.get('HG')
383 hg = os.environ.get('HG')
384 mainmod = sys.modules['__main__']
384 mainmod = sys.modules['__main__']
385 if hg:
385 if hg:
386 _sethgexecutable(hg)
386 _sethgexecutable(hg)
387 elif mainfrozen():
387 elif mainfrozen():
388 _sethgexecutable(sys.executable)
388 _sethgexecutable(sys.executable)
389 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
389 elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
390 _sethgexecutable(mainmod.__file__)
390 _sethgexecutable(mainmod.__file__)
391 else:
391 else:
392 exe = findexe('hg') or os.path.basename(sys.argv[0])
392 exe = findexe('hg') or os.path.basename(sys.argv[0])
393 _sethgexecutable(exe)
393 _sethgexecutable(exe)
394 return _hgexecutable
394 return _hgexecutable
395
395
396 def _sethgexecutable(path):
396 def _sethgexecutable(path):
397 """set location of the 'hg' executable"""
397 """set location of the 'hg' executable"""
398 global _hgexecutable
398 global _hgexecutable
399 _hgexecutable = path
399 _hgexecutable = path
400
400
def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
    '''enhanced shell command execution.
    run with environment maybe modified, maybe in different dir.

    if the command fails and onerr is None, return the exit status. if
    onerr is a ui object, print an error message and return the status;
    otherwise raise onerr(errmsg) as an exception.

    if out is specified, it is assumed to be a file-like object that has a
    write() method. stdout and stderr will be redirected to out.'''
411 try:
411 try:
412 sys.stdout.flush()
412 sys.stdout.flush()
413 except Exception:
413 except Exception:
414 pass
414 pass
415 def py2shell(val):
415 def py2shell(val):
416 'convert python object into string that is useful to shell'
416 'convert python object into string that is useful to shell'
417 if val is None or val is False:
417 if val is None or val is False:
418 return '0'
418 return '0'
419 if val is True:
419 if val is True:
420 return '1'
420 return '1'
421 return str(val)
421 return str(val)
422 origcmd = cmd
422 origcmd = cmd
423 cmd = quotecommand(cmd)
423 cmd = quotecommand(cmd)
424 env = dict(os.environ)
424 env = dict(os.environ)
425 env.update((k, py2shell(v)) for k, v in environ.iteritems())
425 env.update((k, py2shell(v)) for k, v in environ.iteritems())
426 env['HG'] = hgexecutable()
426 env['HG'] = hgexecutable()
427 if out is None or out == sys.__stdout__:
427 if out is None or out == sys.__stdout__:
428 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
428 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
429 env=env, cwd=cwd)
429 env=env, cwd=cwd)
430 else:
430 else:
431 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
431 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
432 env=env, cwd=cwd, stdout=subprocess.PIPE,
432 env=env, cwd=cwd, stdout=subprocess.PIPE,
433 stderr=subprocess.STDOUT)
433 stderr=subprocess.STDOUT)
434 for line in proc.stdout:
434 for line in proc.stdout:
435 out.write(line)
435 out.write(line)
436 proc.wait()
436 proc.wait()
437 rc = proc.returncode
437 rc = proc.returncode
438 if sys.platform == 'OpenVMS' and rc & 1:
438 if sys.platform == 'OpenVMS' and rc & 1:
439 rc = 0
439 rc = 0
440 if rc and onerr:
440 if rc and onerr:
441 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
441 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
442 explainexit(rc)[0])
442 explainexit(rc)[0])
443 if errprefix:
443 if errprefix:
444 errmsg = '%s: %s' % (errprefix, errmsg)
444 errmsg = '%s: %s' % (errprefix, errmsg)
445 try:
445 try:
446 onerr.warn(errmsg + '\n')
446 onerr.warn(errmsg + '\n')
447 except AttributeError:
447 except AttributeError:
448 raise onerr(errmsg)
448 raise onerr(errmsg)
449 return rc
449 return rc
450
450
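# Editor's note: illustrative sketch only, not part of upstream util.py.
# system() is a thin wrapper over the shell: the environment is extended
# (with HG pointing at the running executable), the exit code is
# normalized, and onerr controls whether a failure warns or raises.
# The helper name and command below are hypothetical.
def _systemexample():
    # run in a different directory with one extra environment variable;
    # raise Abort with the given prefix if the command fails
    return system('make doc', environ={'FOO': 'bar'}, cwd='/tmp',
                  onerr=Abort, errprefix=_('doc build failed'))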
451 def checksignature(func):
451 def checksignature(func):
452 '''wrap a function with code to check for calling errors'''
452 '''wrap a function with code to check for calling errors'''
453 def check(*args, **kwargs):
453 def check(*args, **kwargs):
454 try:
454 try:
455 return func(*args, **kwargs)
455 return func(*args, **kwargs)
456 except TypeError:
456 except TypeError:
457 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
457 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
458 raise error.SignatureError
458 raise error.SignatureError
459 raise
459 raise
460
460
461 return check
461 return check
462
462
463 def copyfile(src, dest):
463 def copyfile(src, dest):
464 "copy a file, preserving mode and atime/mtime"
464 "copy a file, preserving mode and atime/mtime"
465 if os.path.islink(src):
465 if os.path.islink(src):
466 try:
466 try:
467 os.unlink(dest)
467 os.unlink(dest)
468 except OSError:
468 except OSError:
469 pass
469 pass
470 os.symlink(os.readlink(src), dest)
470 os.symlink(os.readlink(src), dest)
471 else:
471 else:
472 try:
472 try:
473 shutil.copyfile(src, dest)
473 shutil.copyfile(src, dest)
474 shutil.copymode(src, dest)
474 shutil.copymode(src, dest)
475 except shutil.Error, inst:
475 except shutil.Error, inst:
476 raise Abort(str(inst))
476 raise Abort(str(inst))
477
477
478 def copyfiles(src, dst, hardlink=None):
478 def copyfiles(src, dst, hardlink=None):
479 """Copy a directory tree using hardlinks if possible"""
479 """Copy a directory tree using hardlinks if possible"""
480
480
481 if hardlink is None:
481 if hardlink is None:
482 hardlink = (os.stat(src).st_dev ==
482 hardlink = (os.stat(src).st_dev ==
483 os.stat(os.path.dirname(dst)).st_dev)
483 os.stat(os.path.dirname(dst)).st_dev)
484
484
485 num = 0
485 num = 0
486 if os.path.isdir(src):
486 if os.path.isdir(src):
487 os.mkdir(dst)
487 os.mkdir(dst)
488 for name, kind in osutil.listdir(src):
488 for name, kind in osutil.listdir(src):
489 srcname = os.path.join(src, name)
489 srcname = os.path.join(src, name)
490 dstname = os.path.join(dst, name)
490 dstname = os.path.join(dst, name)
491 hardlink, n = copyfiles(srcname, dstname, hardlink)
491 hardlink, n = copyfiles(srcname, dstname, hardlink)
492 num += n
492 num += n
493 else:
493 else:
494 if hardlink:
494 if hardlink:
495 try:
495 try:
496 oslink(src, dst)
496 oslink(src, dst)
497 except (IOError, OSError):
497 except (IOError, OSError):
498 hardlink = False
498 hardlink = False
499 shutil.copy(src, dst)
499 shutil.copy(src, dst)
500 else:
500 else:
501 shutil.copy(src, dst)
501 shutil.copy(src, dst)
502 num += 1
502 num += 1
503
503
504 return hardlink, num
504 return hardlink, num
505
505
506 _winreservednames = '''con prn aux nul
506 _winreservednames = '''con prn aux nul
507 com1 com2 com3 com4 com5 com6 com7 com8 com9
507 com1 com2 com3 com4 com5 com6 com7 com8 com9
508 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
508 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
509 _winreservedchars = ':*?"<>|'
509 _winreservedchars = ':*?"<>|'
510 def checkwinfilename(path):
510 def checkwinfilename(path):
511 '''Check that the base-relative path is a valid filename on Windows.
511 '''Check that the base-relative path is a valid filename on Windows.
512 Returns None if the path is ok, or a UI string describing the problem.
512 Returns None if the path is ok, or a UI string describing the problem.
513
513
514 >>> checkwinfilename("just/a/normal/path")
514 >>> checkwinfilename("just/a/normal/path")
515 >>> checkwinfilename("foo/bar/con.xml")
515 >>> checkwinfilename("foo/bar/con.xml")
516 "filename contains 'con', which is reserved on Windows"
516 "filename contains 'con', which is reserved on Windows"
517 >>> checkwinfilename("foo/con.xml/bar")
517 >>> checkwinfilename("foo/con.xml/bar")
518 "filename contains 'con', which is reserved on Windows"
518 "filename contains 'con', which is reserved on Windows"
519 >>> checkwinfilename("foo/bar/xml.con")
519 >>> checkwinfilename("foo/bar/xml.con")
520 >>> checkwinfilename("foo/bar/AUX/bla.txt")
520 >>> checkwinfilename("foo/bar/AUX/bla.txt")
521 "filename contains 'AUX', which is reserved on Windows"
521 "filename contains 'AUX', which is reserved on Windows"
522 >>> checkwinfilename("foo/bar/bla:.txt")
522 >>> checkwinfilename("foo/bar/bla:.txt")
523 "filename contains ':', which is reserved on Windows"
523 "filename contains ':', which is reserved on Windows"
524 >>> checkwinfilename("foo/bar/b\07la.txt")
524 >>> checkwinfilename("foo/bar/b\07la.txt")
525 "filename contains '\\\\x07', which is invalid on Windows"
525 "filename contains '\\\\x07', which is invalid on Windows"
526 >>> checkwinfilename("foo/bar/bla ")
526 >>> checkwinfilename("foo/bar/bla ")
527 "filename ends with ' ', which is not allowed on Windows"
527 "filename ends with ' ', which is not allowed on Windows"
528 >>> checkwinfilename("../bar")
528 >>> checkwinfilename("../bar")
529 '''
529 '''
530 for n in path.replace('\\', '/').split('/'):
530 for n in path.replace('\\', '/').split('/'):
531 if not n:
531 if not n:
532 continue
532 continue
533 for c in n:
533 for c in n:
534 if c in _winreservedchars:
534 if c in _winreservedchars:
535 return _("filename contains '%s', which is reserved "
535 return _("filename contains '%s', which is reserved "
536 "on Windows") % c
536 "on Windows") % c
537 if ord(c) <= 31:
537 if ord(c) <= 31:
538 return _("filename contains %r, which is invalid "
538 return _("filename contains %r, which is invalid "
539 "on Windows") % c
539 "on Windows") % c
540 base = n.split('.')[0]
540 base = n.split('.')[0]
541 if base and base.lower() in _winreservednames:
541 if base and base.lower() in _winreservednames:
542 return _("filename contains '%s', which is reserved "
542 return _("filename contains '%s', which is reserved "
543 "on Windows") % base
543 "on Windows") % base
544 t = n[-1]
544 t = n[-1]
545 if t in '. ' and n not in '..':
545 if t in '. ' and n not in '..':
546 return _("filename ends with '%s', which is not allowed "
546 return _("filename ends with '%s', which is not allowed "
547 "on Windows") % t
547 "on Windows") % t
548
548
549 if os.name == 'nt':
549 if os.name == 'nt':
550 checkosfilename = checkwinfilename
550 checkosfilename = checkwinfilename
551 else:
551 else:
552 checkosfilename = platform.checkosfilename
552 checkosfilename = platform.checkosfilename
553
553
554 def makelock(info, pathname):
554 def makelock(info, pathname):
555 try:
555 try:
556 return os.symlink(info, pathname)
556 return os.symlink(info, pathname)
557 except OSError, why:
557 except OSError, why:
558 if why.errno == errno.EEXIST:
558 if why.errno == errno.EEXIST:
559 raise
559 raise
560 except AttributeError: # no symlink in os
560 except AttributeError: # no symlink in os
561 pass
561 pass
562
562
563 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
563 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
564 os.write(ld, info)
564 os.write(ld, info)
565 os.close(ld)
565 os.close(ld)
566
566
567 def readlock(pathname):
567 def readlock(pathname):
568 try:
568 try:
569 return os.readlink(pathname)
569 return os.readlink(pathname)
570 except OSError, why:
570 except OSError, why:
571 if why.errno not in (errno.EINVAL, errno.ENOSYS):
571 if why.errno not in (errno.EINVAL, errno.ENOSYS):
572 raise
572 raise
573 except AttributeError: # no symlink in os
573 except AttributeError: # no symlink in os
574 pass
574 pass
575 fp = posixfile(pathname)
575 fp = posixfile(pathname)
576 r = fp.read()
576 r = fp.read()
577 fp.close()
577 fp.close()
578 return r
578 return r
579
579
580 def fstat(fp):
580 def fstat(fp):
581 '''stat file object that may not have fileno method.'''
581 '''stat file object that may not have fileno method.'''
582 try:
582 try:
583 return os.fstat(fp.fileno())
583 return os.fstat(fp.fileno())
584 except AttributeError:
584 except AttributeError:
585 return os.stat(fp.name)
585 return os.stat(fp.name)
586
586
587 # File system features
587 # File system features
588
588
589 def checkcase(path):
589 def checkcase(path):
590 """
590 """
591 Check whether the given path is on a case-sensitive filesystem
591 Check whether the given path is on a case-sensitive filesystem
592
592
593 Requires a path (like /foo/.hg) ending with a foldable final
593 Requires a path (like /foo/.hg) ending with a foldable final
594 directory component.
594 directory component.
595 """
595 """
596 s1 = os.stat(path)
596 s1 = os.stat(path)
597 d, b = os.path.split(path)
597 d, b = os.path.split(path)
598 b2 = b.upper()
598 b2 = b.upper()
599 if b == b2:
599 if b == b2:
600 b2 = b.lower()
600 b2 = b.lower()
601 if b == b2:
601 if b == b2:
602 return True # no evidence against case sensitivity
602 return True # no evidence against case sensitivity
603 p2 = os.path.join(d, b2)
603 p2 = os.path.join(d, b2)
604 try:
604 try:
605 s2 = os.stat(p2)
605 s2 = os.stat(p2)
606 if s2 == s1:
606 if s2 == s1:
607 return False
607 return False
608 return True
608 return True
609 except OSError:
609 except OSError:
610 return True
610 return True
611
611
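# Editor's note: illustrative sketch only, not part of upstream util.py.
# checkcase() probes the filesystem by re-stat()ing the path with its
# case flipped; the path must exist and its last component must contain
# letters. Repositories typically probe their own '.hg' directory.
def _checkcaseexample(repodir):
    # True on case-sensitive filesystems (most Linux setups),
    # False on case-insensitive ones (default HFS+, NTFS)
    return checkcase(os.path.join(repodir, '.hg'))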
_fspathcache = {}
def fspath(name, root):
    '''Get name in the case stored in the filesystem

    The name is either relative to root, or it is an absolute path starting
    with root. Note that this function is unnecessary, and should not be
    called, for case-sensitive filesystems (simply because it's expensive).

    Both name and root should be normcase-ed.
    '''
    # If name is absolute, make it relative
    if name.startswith(root):
        l = len(root)
        if name[l] == os.sep or name[l] == os.altsep:
            l = l + 1
        name = name[l:]

    if not os.path.lexists(os.path.join(root, name)):
        return None

    seps = os.sep
    if os.altsep:
        seps = seps + os.altsep
    # Protect backslashes. This gets silly very quickly.
    seps.replace('\\','\\\\')
    pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
    dir = os.path.normpath(root)
    result = []
    for part, sep in pattern.findall(name):
        if sep:
            result.append(sep)
            continue

        if dir not in _fspathcache:
            _fspathcache[dir] = os.listdir(dir)
        contents = _fspathcache[dir]

        lenp = len(part)
        for n in contents:
            if lenp == len(n) and normcase(n) == part:
                result.append(n)
                break
        else:
            # Cannot happen, as the file exists!
            result.append(part)
        dir = os.path.join(dir, part)

    return ''.join(result)

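# Editor's note: illustrative sketch only, not part of upstream util.py.
# As of this change the caller must pass both arguments already
# normcase-ed; fspath() no longer folds them itself, so callers such as
# dirstate can reuse a cached normcase-ed root instead of recomputing it
# on every call. The helper name below is hypothetical.
def _fspathexample(root, name):
    # e.g. root='/repo', name='DIR/File.txt' on a case-insensitive fs
    return fspath(normcase(name), normcase(root))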
661 def checknlink(testfile):
661 def checknlink(testfile):
662 '''check whether hardlink count reporting works properly'''
662 '''check whether hardlink count reporting works properly'''
663
663
664 # testfile may be open, so we need a separate file for checking to
664 # testfile may be open, so we need a separate file for checking to
665 # work around issue2543 (or testfile may get lost on Samba shares)
665 # work around issue2543 (or testfile may get lost on Samba shares)
666 f1 = testfile + ".hgtmp1"
666 f1 = testfile + ".hgtmp1"
667 if os.path.lexists(f1):
667 if os.path.lexists(f1):
668 return False
668 return False
669 try:
669 try:
670 posixfile(f1, 'w').close()
670 posixfile(f1, 'w').close()
671 except IOError:
671 except IOError:
672 return False
672 return False
673
673
674 f2 = testfile + ".hgtmp2"
674 f2 = testfile + ".hgtmp2"
675 fd = None
675 fd = None
676 try:
676 try:
677 try:
677 try:
678 oslink(f1, f2)
678 oslink(f1, f2)
679 except OSError:
679 except OSError:
680 return False
680 return False
681
681
682 # nlinks() may behave differently for files on Windows shares if
682 # nlinks() may behave differently for files on Windows shares if
683 # the file is open.
683 # the file is open.
684 fd = posixfile(f2)
684 fd = posixfile(f2)
685 return nlinks(f2) > 1
685 return nlinks(f2) > 1
686 finally:
686 finally:
687 if fd is not None:
687 if fd is not None:
688 fd.close()
688 fd.close()
689 for f in (f1, f2):
689 for f in (f1, f2):
690 try:
690 try:
691 os.unlink(f)
691 os.unlink(f)
692 except OSError:
692 except OSError:
693 pass
693 pass
694
694
695 return False
695 return False
696
696
697 def endswithsep(path):
697 def endswithsep(path):
698 '''Check path ends with os.sep or os.altsep.'''
698 '''Check path ends with os.sep or os.altsep.'''
699 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
699 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
700
700
def splitpath(path):
    '''Split path by os.sep.
    Note that this function does not use os.altsep because it is
    meant as an alternative to a simple "xxx.split(os.sep)".
    It is recommended to use os.path.normpath() before using this
    function if needed.'''
    return path.split(os.sep)

709 def gui():
709 def gui():
710 '''Are we running in a GUI?'''
710 '''Are we running in a GUI?'''
711 if sys.platform == 'darwin':
711 if sys.platform == 'darwin':
712 if 'SSH_CONNECTION' in os.environ:
712 if 'SSH_CONNECTION' in os.environ:
713 # handle SSH access to a box where the user is logged in
713 # handle SSH access to a box where the user is logged in
714 return False
714 return False
715 elif getattr(osutil, 'isgui', None):
715 elif getattr(osutil, 'isgui', None):
716 # check if a CoreGraphics session is available
716 # check if a CoreGraphics session is available
717 return osutil.isgui()
717 return osutil.isgui()
718 else:
718 else:
719 # pure build; use a safe default
719 # pure build; use a safe default
720 return True
720 return True
721 else:
721 else:
722 return os.name == "nt" or os.environ.get("DISPLAY")
722 return os.name == "nt" or os.environ.get("DISPLAY")
723
723
724 def mktempcopy(name, emptyok=False, createmode=None):
724 def mktempcopy(name, emptyok=False, createmode=None):
    """Create a temporary file with the same contents as name
726
726
727 The permission bits are copied from the original file.
727 The permission bits are copied from the original file.
728
728
729 If the temporary file is going to be truncated immediately, you
729 If the temporary file is going to be truncated immediately, you
730 can use emptyok=True as an optimization.
730 can use emptyok=True as an optimization.
731
731
732 Returns the name of the temporary file.
732 Returns the name of the temporary file.
733 """
733 """
734 d, fn = os.path.split(name)
734 d, fn = os.path.split(name)
735 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
735 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
736 os.close(fd)
736 os.close(fd)
737 # Temporary files are created with mode 0600, which is usually not
737 # Temporary files are created with mode 0600, which is usually not
738 # what we want. If the original file already exists, just copy
738 # what we want. If the original file already exists, just copy
739 # its mode. Otherwise, manually obey umask.
739 # its mode. Otherwise, manually obey umask.
740 copymode(name, temp, createmode)
740 copymode(name, temp, createmode)
741 if emptyok:
741 if emptyok:
742 return temp
742 return temp
743 try:
743 try:
744 try:
744 try:
745 ifp = posixfile(name, "rb")
745 ifp = posixfile(name, "rb")
746 except IOError, inst:
746 except IOError, inst:
747 if inst.errno == errno.ENOENT:
747 if inst.errno == errno.ENOENT:
748 return temp
748 return temp
749 if not getattr(inst, 'filename', None):
749 if not getattr(inst, 'filename', None):
750 inst.filename = name
750 inst.filename = name
751 raise
751 raise
752 ofp = posixfile(temp, "wb")
752 ofp = posixfile(temp, "wb")
753 for chunk in filechunkiter(ifp):
753 for chunk in filechunkiter(ifp):
754 ofp.write(chunk)
754 ofp.write(chunk)
755 ifp.close()
755 ifp.close()
756 ofp.close()
756 ofp.close()
757 except:
757 except:
758 try: os.unlink(temp)
758 try: os.unlink(temp)
759 except: pass
759 except: pass
760 raise
760 raise
761 return temp
761 return temp
762
762
763 class atomictempfile(object):
763 class atomictempfile(object):
764 '''writeable file object that atomically updates a file
764 '''writeable file object that atomically updates a file
765
765
766 All writes will go to a temporary copy of the original file. Call
766 All writes will go to a temporary copy of the original file. Call
767 close() when you are done writing, and atomictempfile will rename
767 close() when you are done writing, and atomictempfile will rename
768 the temporary copy to the original name, making the changes
768 the temporary copy to the original name, making the changes
769 visible. If the object is destroyed without being closed, all your
769 visible. If the object is destroyed without being closed, all your
770 writes are discarded.
770 writes are discarded.
771 '''
771 '''
772 def __init__(self, name, mode='w+b', createmode=None):
772 def __init__(self, name, mode='w+b', createmode=None):
773 self.__name = name # permanent name
773 self.__name = name # permanent name
774 self._tempname = mktempcopy(name, emptyok=('w' in mode),
774 self._tempname = mktempcopy(name, emptyok=('w' in mode),
775 createmode=createmode)
775 createmode=createmode)
776 self._fp = posixfile(self._tempname, mode)
776 self._fp = posixfile(self._tempname, mode)
777
777
778 # delegated methods
778 # delegated methods
779 self.write = self._fp.write
779 self.write = self._fp.write
780 self.fileno = self._fp.fileno
780 self.fileno = self._fp.fileno
781
781
782 def close(self):
782 def close(self):
783 if not self._fp.closed:
783 if not self._fp.closed:
784 self._fp.close()
784 self._fp.close()
785 rename(self._tempname, localpath(self.__name))
785 rename(self._tempname, localpath(self.__name))
786
786
787 def discard(self):
787 def discard(self):
788 if not self._fp.closed:
788 if not self._fp.closed:
789 try:
789 try:
790 os.unlink(self._tempname)
790 os.unlink(self._tempname)
791 except OSError:
791 except OSError:
792 pass
792 pass
793 self._fp.close()
793 self._fp.close()
794
794
795 def __del__(self):
795 def __del__(self):
796 if safehasattr(self, '_fp'): # constructor actually did something
796 if safehasattr(self, '_fp'): # constructor actually did something
797 self.discard()
797 self.discard()
798
798
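# Editor's note: illustrative sketch only, not part of upstream util.py.
# Typical atomictempfile usage: write everything, then close() to make
# the new content visible in one rename; any error path that skips
# close() leaves the original file untouched. The helper is hypothetical.
def _atomictempfileexample(path, data):
    f = atomictempfile(path, 'wb', createmode=None)
    try:
        f.write(data)
        f.close()    # atomic rename over 'path'
    except: # re-raise after discarding the temporary copy
        f.discard()
        raise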
799 def makedirs(name, mode=None):
799 def makedirs(name, mode=None):
800 """recursive directory creation with parent mode inheritance"""
800 """recursive directory creation with parent mode inheritance"""
801 try:
801 try:
802 os.mkdir(name)
802 os.mkdir(name)
803 except OSError, err:
803 except OSError, err:
804 if err.errno == errno.EEXIST:
804 if err.errno == errno.EEXIST:
805 return
805 return
806 if err.errno != errno.ENOENT or not name:
806 if err.errno != errno.ENOENT or not name:
807 raise
807 raise
808 parent = os.path.dirname(os.path.abspath(name))
808 parent = os.path.dirname(os.path.abspath(name))
809 if parent == name:
809 if parent == name:
810 raise
810 raise
811 makedirs(parent, mode)
811 makedirs(parent, mode)
812 os.mkdir(name)
812 os.mkdir(name)
813 if mode is not None:
813 if mode is not None:
814 os.chmod(name, mode)
814 os.chmod(name, mode)
815
815
816 def readfile(path):
816 def readfile(path):
817 fp = open(path, 'rb')
817 fp = open(path, 'rb')
818 try:
818 try:
819 return fp.read()
819 return fp.read()
820 finally:
820 finally:
821 fp.close()
821 fp.close()
822
822
823 def writefile(path, text):
823 def writefile(path, text):
824 fp = open(path, 'wb')
824 fp = open(path, 'wb')
825 try:
825 try:
826 fp.write(text)
826 fp.write(text)
827 finally:
827 finally:
828 fp.close()
828 fp.close()
829
829
830 def appendfile(path, text):
830 def appendfile(path, text):
831 fp = open(path, 'ab')
831 fp = open(path, 'ab')
832 try:
832 try:
833 fp.write(text)
833 fp.write(text)
834 finally:
834 finally:
835 fp.close()
835 fp.close()
836
836
837 class chunkbuffer(object):
837 class chunkbuffer(object):
838 """Allow arbitrary sized chunks of data to be efficiently read from an
838 """Allow arbitrary sized chunks of data to be efficiently read from an
839 iterator over chunks of arbitrary size."""
839 iterator over chunks of arbitrary size."""
840
840
    def __init__(self, in_iter):
        """in_iter is the iterator that's iterating over the input chunks."""
844 def splitbig(chunks):
844 def splitbig(chunks):
845 for chunk in chunks:
845 for chunk in chunks:
846 if len(chunk) > 2**20:
846 if len(chunk) > 2**20:
847 pos = 0
847 pos = 0
848 while pos < len(chunk):
848 while pos < len(chunk):
849 end = pos + 2 ** 18
849 end = pos + 2 ** 18
850 yield chunk[pos:end]
850 yield chunk[pos:end]
851 pos = end
851 pos = end
852 else:
852 else:
853 yield chunk
853 yield chunk
854 self.iter = splitbig(in_iter)
854 self.iter = splitbig(in_iter)
855 self._queue = []
855 self._queue = []
856
856
857 def read(self, l):
857 def read(self, l):
858 """Read L bytes of data from the iterator of chunks of data.
858 """Read L bytes of data from the iterator of chunks of data.
859 Returns less than L bytes if the iterator runs dry."""
859 Returns less than L bytes if the iterator runs dry."""
860 left = l
860 left = l
861 buf = ''
861 buf = ''
862 queue = self._queue
862 queue = self._queue
863 while left > 0:
863 while left > 0:
864 # refill the queue
864 # refill the queue
865 if not queue:
865 if not queue:
866 target = 2**18
866 target = 2**18
867 for chunk in self.iter:
867 for chunk in self.iter:
868 queue.append(chunk)
868 queue.append(chunk)
869 target -= len(chunk)
869 target -= len(chunk)
870 if target <= 0:
870 if target <= 0:
871 break
871 break
872 if not queue:
872 if not queue:
873 break
873 break
874
874
875 chunk = queue.pop(0)
875 chunk = queue.pop(0)
876 left -= len(chunk)
876 left -= len(chunk)
877 if left < 0:
877 if left < 0:
878 queue.insert(0, chunk[left:])
878 queue.insert(0, chunk[left:])
879 buf += chunk[:left]
879 buf += chunk[:left]
880 else:
880 else:
881 buf += chunk
881 buf += chunk
882
882
883 return buf
883 return buf
884
884
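# Editor's note: illustrative sketch only, not part of upstream util.py.
# chunkbuffer turns an iterator of arbitrarily sized strings into a
# reader with read(n) semantics; read() returns fewer than n bytes only
# once the underlying iterator is exhausted. The helper is hypothetical.
def _chunkbufferexample():
    cb = chunkbuffer(iter(['abc', 'defg', 'h']))
    first = cb.read(4)    # 'abcd'
    rest = cb.read(100)   # 'efgh' - the iterator ran dry
    return first, rest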
885 def filechunkiter(f, size=65536, limit=None):
885 def filechunkiter(f, size=65536, limit=None):
886 """Create a generator that produces the data in the file size
886 """Create a generator that produces the data in the file size
887 (default 65536) bytes at a time, up to optional limit (default is
887 (default 65536) bytes at a time, up to optional limit (default is
888 to read all data). Chunks may be less than size bytes if the
888 to read all data). Chunks may be less than size bytes if the
889 chunk is the last chunk in the file, or the file is a socket or
889 chunk is the last chunk in the file, or the file is a socket or
890 some other type of file that sometimes reads less data than is
890 some other type of file that sometimes reads less data than is
891 requested."""
891 requested."""
892 assert size >= 0
892 assert size >= 0
893 assert limit is None or limit >= 0
893 assert limit is None or limit >= 0
894 while True:
894 while True:
895 if limit is None:
895 if limit is None:
896 nbytes = size
896 nbytes = size
897 else:
897 else:
898 nbytes = min(limit, size)
898 nbytes = min(limit, size)
899 s = nbytes and f.read(nbytes)
899 s = nbytes and f.read(nbytes)
900 if not s:
900 if not s:
901 break
901 break
902 if limit:
902 if limit:
903 limit -= len(s)
903 limit -= len(s)
904 yield s
904 yield s
905
905
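# Editor's note: illustrative sketch only, not part of upstream util.py.
# filechunkiter is the usual way to stream a file without slurping it
# into memory. The helper name below is hypothetical.
def _filechunkiterexample(srcpath, destfp):
    src = posixfile(srcpath, 'rb')
    try:
        # copy at most 10 MB, 64k at a time
        for chunk in filechunkiter(src, size=65536, limit=10 * 1024 * 1024):
            destfp.write(chunk)
    finally:
        src.close()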
906 def makedate():
906 def makedate():
907 ct = time.time()
907 ct = time.time()
908 if ct < 0:
908 if ct < 0:
909 hint = _("check your clock")
909 hint = _("check your clock")
910 raise Abort(_("negative timestamp: %d") % ct, hint=hint)
910 raise Abort(_("negative timestamp: %d") % ct, hint=hint)
911 delta = (datetime.datetime.utcfromtimestamp(ct) -
911 delta = (datetime.datetime.utcfromtimestamp(ct) -
912 datetime.datetime.fromtimestamp(ct))
912 datetime.datetime.fromtimestamp(ct))
913 tz = delta.days * 86400 + delta.seconds
913 tz = delta.days * 86400 + delta.seconds
914 return ct, tz
914 return ct, tz
915
915
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
    """represent a (unixtime, offset) tuple as a localized time.
    unixtime is seconds since the epoch, and offset is the time zone's
    number of seconds away from UTC. "%1" and "%2" in the format string
    are expanded to the signed hours and the minutes of that offset."""
921 t, tz = date or makedate()
921 t, tz = date or makedate()
922 if t < 0:
922 if t < 0:
923 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
923 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
924 tz = 0
924 tz = 0
925 if "%1" in format or "%2" in format:
925 if "%1" in format or "%2" in format:
926 sign = (tz > 0) and "-" or "+"
926 sign = (tz > 0) and "-" or "+"
927 minutes = abs(tz) // 60
927 minutes = abs(tz) // 60
928 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
928 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
929 format = format.replace("%2", "%02d" % (minutes % 60))
929 format = format.replace("%2", "%02d" % (minutes % 60))
930 try:
930 try:
931 t = time.gmtime(float(t) - tz)
931 t = time.gmtime(float(t) - tz)
932 except ValueError:
932 except ValueError:
933 # time was out of range
933 # time was out of range
934 t = time.gmtime(sys.maxint)
934 t = time.gmtime(sys.maxint)
935 s = time.strftime(format, t)
935 s = time.strftime(format, t)
936 return s
936 return s
937
937
def shortdate(date=None):
    """turn (timestamp, tzoff) tuple into ISO 8601 date."""
    return datestr(date, format='%Y-%m-%d')

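# Editor's note: illustrative sketch only, not part of upstream util.py.
# makedate()/datestr()/shortdate() round-trip hg's (unixtime, offset)
# representation; "%1%2" in the default format becomes e.g. "+0100".
def _datestrexample():
    now = makedate()        # e.g. (1323945976.0, -3600) on a UTC+1 host
    full = datestr(now)     # e.g. 'Thu Dec 15 12:26:16 2011 +0100'
    day = shortdate(now)    # e.g. '2011-12-15'
    return full, day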
942 def strdate(string, format, defaults=[]):
942 def strdate(string, format, defaults=[]):
943 """parse a localized time string and return a (unixtime, offset) tuple.
943 """parse a localized time string and return a (unixtime, offset) tuple.
944 if the string cannot be parsed, ValueError is raised."""
944 if the string cannot be parsed, ValueError is raised."""
945 def timezone(string):
945 def timezone(string):
946 tz = string.split()[-1]
946 tz = string.split()[-1]
947 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
947 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
948 sign = (tz[0] == "+") and 1 or -1
948 sign = (tz[0] == "+") and 1 or -1
949 hours = int(tz[1:3])
949 hours = int(tz[1:3])
950 minutes = int(tz[3:5])
950 minutes = int(tz[3:5])
951 return -sign * (hours * 60 + minutes) * 60
951 return -sign * (hours * 60 + minutes) * 60
952 if tz == "GMT" or tz == "UTC":
952 if tz == "GMT" or tz == "UTC":
953 return 0
953 return 0
954 return None
954 return None
955
955
956 # NOTE: unixtime = localunixtime + offset
956 # NOTE: unixtime = localunixtime + offset
957 offset, date = timezone(string), string
957 offset, date = timezone(string), string
958 if offset is not None:
958 if offset is not None:
959 date = " ".join(string.split()[:-1])
959 date = " ".join(string.split()[:-1])
960
960
961 # add missing elements from defaults
961 # add missing elements from defaults
962 usenow = False # default to using biased defaults
962 usenow = False # default to using biased defaults
963 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
963 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
964 found = [True for p in part if ("%"+p) in format]
964 found = [True for p in part if ("%"+p) in format]
965 if not found:
965 if not found:
966 date += "@" + defaults[part][usenow]
966 date += "@" + defaults[part][usenow]
967 format += "@%" + part[0]
967 format += "@%" + part[0]
968 else:
968 else:
969 # We've found a specific time element, less specific time
969 # We've found a specific time element, less specific time
970 # elements are relative to today
970 # elements are relative to today
971 usenow = True
971 usenow = True
972
972
973 timetuple = time.strptime(date, format)
973 timetuple = time.strptime(date, format)
974 localunixtime = int(calendar.timegm(timetuple))
974 localunixtime = int(calendar.timegm(timetuple))
975 if offset is None:
975 if offset is None:
976 # local timezone
976 # local timezone
977 unixtime = int(time.mktime(timetuple))
977 unixtime = int(time.mktime(timetuple))
978 offset = unixtime - localunixtime
978 offset = unixtime - localunixtime
979 else:
979 else:
980 unixtime = localunixtime + offset
980 unixtime = localunixtime + offset
981 return unixtime, offset
981 return unixtime, offset
982
982
983 def parsedate(date, formats=None, bias={}):
983 def parsedate(date, formats=None, bias={}):
984 """parse a localized date/time and return a (unixtime, offset) tuple.
984 """parse a localized date/time and return a (unixtime, offset) tuple.
985
985
986 The date may be a "unixtime offset" string or in one of the specified
986 The date may be a "unixtime offset" string or in one of the specified
987 formats. If the date already is a (unixtime, offset) tuple, it is returned.
987 formats. If the date already is a (unixtime, offset) tuple, it is returned.
988 """
988 """
989 if not date:
989 if not date:
990 return 0, 0
990 return 0, 0
991 if isinstance(date, tuple) and len(date) == 2:
991 if isinstance(date, tuple) and len(date) == 2:
992 return date
992 return date
993 if not formats:
993 if not formats:
994 formats = defaultdateformats
994 formats = defaultdateformats
995 date = date.strip()
995 date = date.strip()
996 try:
996 try:
997 when, offset = map(int, date.split(' '))
997 when, offset = map(int, date.split(' '))
998 except ValueError:
998 except ValueError:
999 # fill out defaults
999 # fill out defaults
1000 now = makedate()
1000 now = makedate()
1001 defaults = {}
1001 defaults = {}
1002 for part in ("d", "mb", "yY", "HI", "M", "S"):
1002 for part in ("d", "mb", "yY", "HI", "M", "S"):
1003 # this piece is for rounding the specific end of unknowns
1003 # this piece is for rounding the specific end of unknowns
1004 b = bias.get(part)
1004 b = bias.get(part)
1005 if b is None:
1005 if b is None:
1006 if part[0] in "HMS":
1006 if part[0] in "HMS":
1007 b = "00"
1007 b = "00"
1008 else:
1008 else:
1009 b = "0"
1009 b = "0"
1010
1010
1011 # this piece is for matching the generic end to today's date
1011 # this piece is for matching the generic end to today's date
1012 n = datestr(now, "%" + part[0])
1012 n = datestr(now, "%" + part[0])
1013
1013
1014 defaults[part] = (b, n)
1014 defaults[part] = (b, n)
1015
1015
1016 for format in formats:
1016 for format in formats:
1017 try:
1017 try:
1018 when, offset = strdate(date, format, defaults)
1018 when, offset = strdate(date, format, defaults)
1019 except (ValueError, OverflowError):
1019 except (ValueError, OverflowError):
1020 pass
1020 pass
1021 else:
1021 else:
1022 break
1022 break
1023 else:
1023 else:
1024 raise Abort(_('invalid date: %r') % date)
1024 raise Abort(_('invalid date: %r') % date)
1025 # validate explicit (probably user-specified) date and
1025 # validate explicit (probably user-specified) date and
1026 # time zone offset. values must fit in signed 32 bits for
1026 # time zone offset. values must fit in signed 32 bits for
1027 # current 32-bit linux runtimes. timezones go from UTC-12
1027 # current 32-bit linux runtimes. timezones go from UTC-12
1028 # to UTC+14
1028 # to UTC+14
1029 if abs(when) > 0x7fffffff:
1029 if abs(when) > 0x7fffffff:
1030 raise Abort(_('date exceeds 32 bits: %d') % when)
1030 raise Abort(_('date exceeds 32 bits: %d') % when)
1031 if when < 0:
1031 if when < 0:
1032 raise Abort(_('negative date value: %d') % when)
1032 raise Abort(_('negative date value: %d') % when)
1033 if offset < -50400 or offset > 43200:
1033 if offset < -50400 or offset > 43200:
1034 raise Abort(_('impossible time zone offset: %d') % offset)
1034 raise Abort(_('impossible time zone offset: %d') % offset)
1035 return when, offset
1035 return when, offset
1036
1036
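# Editor's note: illustrative sketch only, not part of upstream util.py.
# parsedate() accepts the formats listed in defaultdateformats and always
# returns the canonical (unixtime, offset) pair used throughout hg.
# The helper name below is hypothetical.
def _parsedateexample():
    when, offset = parsedate('2011-12-15 13:30 +0100')
    # already-parsed tuples and raw "unixtime offset" strings pass through
    same = parsedate((when, offset))
    raw = parsedate('%d %d' % (when, offset))
    return when, offset, same, raw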
1037 def matchdate(date):
1037 def matchdate(date):
1038 """Return a function that matches a given date match specifier
1038 """Return a function that matches a given date match specifier
1039
1039
1040 Formats include:
1040 Formats include:
1041
1041
1042 '{date}' match a given date to the accuracy provided
1042 '{date}' match a given date to the accuracy provided
1043
1043
1044 '<{date}' on or before a given date
1044 '<{date}' on or before a given date
1045
1045
1046 '>{date}' on or after a given date
1046 '>{date}' on or after a given date
1047
1047
1048 >>> p1 = parsedate("10:29:59")
1048 >>> p1 = parsedate("10:29:59")
1049 >>> p2 = parsedate("10:30:00")
1049 >>> p2 = parsedate("10:30:00")
1050 >>> p3 = parsedate("10:30:59")
1050 >>> p3 = parsedate("10:30:59")
1051 >>> p4 = parsedate("10:31:00")
1051 >>> p4 = parsedate("10:31:00")
1052 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1052 >>> p5 = parsedate("Sep 15 10:30:00 1999")
1053 >>> f = matchdate("10:30")
1053 >>> f = matchdate("10:30")
1054 >>> f(p1[0])
1054 >>> f(p1[0])
1055 False
1055 False
1056 >>> f(p2[0])
1056 >>> f(p2[0])
1057 True
1057 True
1058 >>> f(p3[0])
1058 >>> f(p3[0])
1059 True
1059 True
1060 >>> f(p4[0])
1060 >>> f(p4[0])
1061 False
1061 False
1062 >>> f(p5[0])
1062 >>> f(p5[0])
1063 False
1063 False
1064 """
1064 """
1065
1065
1066 def lower(date):
1066 def lower(date):
1067 d = dict(mb="1", d="1")
1067 d = dict(mb="1", d="1")
1068 return parsedate(date, extendeddateformats, d)[0]
1068 return parsedate(date, extendeddateformats, d)[0]
1069
1069
1070 def upper(date):
1070 def upper(date):
1071 d = dict(mb="12", HI="23", M="59", S="59")
1071 d = dict(mb="12", HI="23", M="59", S="59")
1072 for days in ("31", "30", "29"):
1072 for days in ("31", "30", "29"):
1073 try:
1073 try:
1074 d["d"] = days
1074 d["d"] = days
1075 return parsedate(date, extendeddateformats, d)[0]
1075 return parsedate(date, extendeddateformats, d)[0]
1076 except:
1076 except:
1077 pass
1077 pass
1078 d["d"] = "28"
1078 d["d"] = "28"
1079 return parsedate(date, extendeddateformats, d)[0]
1079 return parsedate(date, extendeddateformats, d)[0]
1080
1080
1081 date = date.strip()
1081 date = date.strip()
1082
1082
1083 if not date:
1083 if not date:
1084 raise Abort(_("dates cannot consist entirely of whitespace"))
1084 raise Abort(_("dates cannot consist entirely of whitespace"))
1085 elif date[0] == "<":
1085 elif date[0] == "<":
1086 if not date[1:]:
1086 if not date[1:]:
1087 raise Abort(_("invalid day spec, use '<DATE'"))
1087 raise Abort(_("invalid day spec, use '<DATE'"))
1088 when = upper(date[1:])
1088 when = upper(date[1:])
1089 return lambda x: x <= when
1089 return lambda x: x <= when
1090 elif date[0] == ">":
1090 elif date[0] == ">":
1091 if not date[1:]:
1091 if not date[1:]:
1092 raise Abort(_("invalid day spec, use '>DATE'"))
1092 raise Abort(_("invalid day spec, use '>DATE'"))
1093 when = lower(date[1:])
1093 when = lower(date[1:])
1094 return lambda x: x >= when
1094 return lambda x: x >= when
1095 elif date[0] == "-":
1095 elif date[0] == "-":
1096 try:
1096 try:
1097 days = int(date[1:])
1097 days = int(date[1:])
1098 except ValueError:
1098 except ValueError:
1099 raise Abort(_("invalid day spec: %s") % date[1:])
1099 raise Abort(_("invalid day spec: %s") % date[1:])
1100 if days < 0:
1100 if days < 0:
1101 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1101 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1102 % date[1:])
1102 % date[1:])
1103 when = makedate()[0] - days * 3600 * 24
1103 when = makedate()[0] - days * 3600 * 24
1104 return lambda x: x >= when
1104 return lambda x: x >= when
1105 elif " to " in date:
1105 elif " to " in date:
1106 a, b = date.split(" to ")
1106 a, b = date.split(" to ")
1107 start, stop = lower(a), upper(b)
1107 start, stop = lower(a), upper(b)
1108 return lambda x: x >= start and x <= stop
1108 return lambda x: x >= start and x <= stop
1109 else:
1109 else:
1110 start, stop = lower(date), upper(date)
1110 start, stop = lower(date), upper(date)
1111 return lambda x: x >= start and x <= stop
1111 return lambda x: x >= start and x <= stop
1112
1112
1113 def shortuser(user):
1113 def shortuser(user):
1114 """Return a short representation of a user name or email address."""
1114 """Return a short representation of a user name or email address."""
1115 f = user.find('@')
1115 f = user.find('@')
1116 if f >= 0:
1116 if f >= 0:
1117 user = user[:f]
1117 user = user[:f]
1118 f = user.find('<')
1118 f = user.find('<')
1119 if f >= 0:
1119 if f >= 0:
1120 user = user[f + 1:]
1120 user = user[f + 1:]
1121 f = user.find(' ')
1121 f = user.find(' ')
1122 if f >= 0:
1122 if f >= 0:
1123 user = user[:f]
1123 user = user[:f]
1124 f = user.find('.')
1124 f = user.find('.')
1125 if f >= 0:
1125 if f >= 0:
1126 user = user[:f]
1126 user = user[:f]
1127 return user
1127 return user
1128
1128
1129 def email(author):
1129 def email(author):
1130 '''get email of author.'''
1130 '''get email of author.'''
1131 r = author.find('>')
1131 r = author.find('>')
1132 if r == -1:
1132 if r == -1:
1133 r = None
1133 r = None
1134 return author[author.find('<') + 1:r]
1134 return author[author.find('<') + 1:r]
1135
1135
1136 def _ellipsis(text, maxlength):
1136 def _ellipsis(text, maxlength):
1137 if len(text) <= maxlength:
1137 if len(text) <= maxlength:
1138 return text, False
1138 return text, False
1139 else:
1139 else:
1140 return "%s..." % (text[:maxlength - 3]), True
1140 return "%s..." % (text[:maxlength - 3]), True
1141
1141
1142 def ellipsis(text, maxlength=400):
1142 def ellipsis(text, maxlength=400):
1143 """Trim string to at most maxlength (default: 400) characters."""
1143 """Trim string to at most maxlength (default: 400) characters."""
1144 try:
1144 try:
        # decode to unicode so we do not split inside a multi-byte sequence
1146 utext, truncated = _ellipsis(text.decode(encoding.encoding),
1146 utext, truncated = _ellipsis(text.decode(encoding.encoding),
1147 maxlength)
1147 maxlength)
1148 if not truncated:
1148 if not truncated:
1149 return text
1149 return text
1150 return utext.encode(encoding.encoding)
1150 return utext.encode(encoding.encoding)
1151 except (UnicodeDecodeError, UnicodeEncodeError):
1151 except (UnicodeDecodeError, UnicodeEncodeError):
1152 return _ellipsis(text, maxlength)[0]
1152 return _ellipsis(text, maxlength)[0]
1153
1153
1154 def bytecount(nbytes):
1154 def bytecount(nbytes):
1155 '''return byte count formatted as readable string, with units'''
1155 '''return byte count formatted as readable string, with units'''
1156
1156
1157 units = (
1157 units = (
1158 (100, 1 << 30, _('%.0f GB')),
1158 (100, 1 << 30, _('%.0f GB')),
1159 (10, 1 << 30, _('%.1f GB')),
1159 (10, 1 << 30, _('%.1f GB')),
1160 (1, 1 << 30, _('%.2f GB')),
1160 (1, 1 << 30, _('%.2f GB')),
1161 (100, 1 << 20, _('%.0f MB')),
1161 (100, 1 << 20, _('%.0f MB')),
1162 (10, 1 << 20, _('%.1f MB')),
1162 (10, 1 << 20, _('%.1f MB')),
1163 (1, 1 << 20, _('%.2f MB')),
1163 (1, 1 << 20, _('%.2f MB')),
1164 (100, 1 << 10, _('%.0f KB')),
1164 (100, 1 << 10, _('%.0f KB')),
1165 (10, 1 << 10, _('%.1f KB')),
1165 (10, 1 << 10, _('%.1f KB')),
1166 (1, 1 << 10, _('%.2f KB')),
1166 (1, 1 << 10, _('%.2f KB')),
1167 (1, 1, _('%.0f bytes')),
1167 (1, 1, _('%.0f bytes')),
1168 )
1168 )
1169
1169
1170 for multiplier, divisor, format in units:
1170 for multiplier, divisor, format in units:
1171 if nbytes >= divisor * multiplier:
1171 if nbytes >= divisor * multiplier:
1172 return format % (nbytes / float(divisor))
1172 return format % (nbytes / float(divisor))
1173 return units[-1][2] % nbytes
1173 return units[-1][2] % nbytes
1174
1174
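# Editor's note: illustrative sketch only, not part of upstream util.py.
# bytecount() picks the first unit whose threshold the value reaches, so
# precision drops as the number grows:
#   bytecount(512)          -> '512 bytes'
#   bytecount(100 * 1024)   -> '100 KB'
#   bytecount(3 * 1048576)  -> '3.00 MB'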
1175 def uirepr(s):
1175 def uirepr(s):
1176 # Avoid double backslash in Windows path repr()
1176 # Avoid double backslash in Windows path repr()
1177 return repr(s).replace('\\\\', '\\')
1177 return repr(s).replace('\\\\', '\\')
1178
1178
1179 # delay import of textwrap
1179 # delay import of textwrap
1180 def MBTextWrapper(**kwargs):
1180 def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for width-awareness.

        Neither the number of 'bytes' in any encoding nor the number of
        'characters' is appropriate for calculating the terminal columns
        occupied by a given string.

        The original TextWrapper implementation uses the built-in len()
        directly, so it must be overridden to use the column width of
        each character.

        In addition, characters classified as 'ambiguous' width are
        treated as wide in East Asian locales, but as narrow elsewhere.
        This requires a per-user decision to determine the width of such
        characters.
        """
1196 def __init__(self, **kwargs):
1196 def __init__(self, **kwargs):
1197 textwrap.TextWrapper.__init__(self, **kwargs)
1197 textwrap.TextWrapper.__init__(self, **kwargs)
1198
1198
1199 # for compatibility between 2.4 and 2.6
1199 # for compatibility between 2.4 and 2.6
1200 if getattr(self, 'drop_whitespace', None) is None:
1200 if getattr(self, 'drop_whitespace', None) is None:
1201 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1201 self.drop_whitespace = kwargs.get('drop_whitespace', True)
1202
1202
1203 def _cutdown(self, ucstr, space_left):
1203 def _cutdown(self, ucstr, space_left):
1204 l = 0
1204 l = 0
1205 colwidth = encoding.ucolwidth
1205 colwidth = encoding.ucolwidth
1206 for i in xrange(len(ucstr)):
1206 for i in xrange(len(ucstr)):
1207 l += colwidth(ucstr[i])
1207 l += colwidth(ucstr[i])
1208 if space_left < l:
1208 if space_left < l:
1209 return (ucstr[:i], ucstr[i:])
1209 return (ucstr[:i], ucstr[i:])
1210 return ucstr, ''
1210 return ucstr, ''
1211
1211
1212 # overriding of base class
1212 # overriding of base class
1213 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1213 def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
1214 space_left = max(width - cur_len, 1)
1214 space_left = max(width - cur_len, 1)
1215
1215
1216 if self.break_long_words:
1216 if self.break_long_words:
1217 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1217 cut, res = self._cutdown(reversed_chunks[-1], space_left)
1218 cur_line.append(cut)
1218 cur_line.append(cut)
1219 reversed_chunks[-1] = res
1219 reversed_chunks[-1] = res
1220 elif not cur_line:
1220 elif not cur_line:
1221 cur_line.append(reversed_chunks.pop())
1221 cur_line.append(reversed_chunks.pop())
1222
1222
1223 # this overriding code is imported from TextWrapper of python 2.6
1223 # this overriding code is imported from TextWrapper of python 2.6
1224 # to calculate columns of string by 'encoding.ucolwidth()'
1224 # to calculate columns of string by 'encoding.ucolwidth()'
1225 def _wrap_chunks(self, chunks):
1225 def _wrap_chunks(self, chunks):
1226 colwidth = encoding.ucolwidth
1226 colwidth = encoding.ucolwidth
1227
1227
1228 lines = []
1228 lines = []
1229 if self.width <= 0:
1229 if self.width <= 0:
1230 raise ValueError("invalid width %r (must be > 0)" % self.width)
1230 raise ValueError("invalid width %r (must be > 0)" % self.width)
1231
1231
1232 # Arrange in reverse order so items can be efficiently popped
1232 # Arrange in reverse order so items can be efficiently popped
            # from a stack of chunks.
1234 chunks.reverse()
1234 chunks.reverse()
1235
1235
1236 while chunks:
1236 while chunks:
1237
1237
1238 # Start the list of chunks that will make up the current line.
1238 # Start the list of chunks that will make up the current line.
1239 # cur_len is just the length of all the chunks in cur_line.
1239 # cur_len is just the length of all the chunks in cur_line.
1240 cur_line = []
1240 cur_line = []
1241 cur_len = 0
1241 cur_len = 0
1242
1242
1243 # Figure out which static string will prefix this line.
1243 # Figure out which static string will prefix this line.
1244 if lines:
1244 if lines:
1245 indent = self.subsequent_indent
1245 indent = self.subsequent_indent
1246 else:
1246 else:
1247 indent = self.initial_indent
1247 indent = self.initial_indent
1248
1248
1249 # Maximum width for this line.
1249 # Maximum width for this line.
1250 width = self.width - len(indent)
1250 width = self.width - len(indent)
1251
1251
1252 # First chunk on line is whitespace -- drop it, unless this
1252 # First chunk on line is whitespace -- drop it, unless this
1253 # is the very beginning of the text (ie. no lines started yet).
1253 # is the very beginning of the text (ie. no lines started yet).
1254 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1254 if self.drop_whitespace and chunks[-1].strip() == '' and lines:
1255 del chunks[-1]
1255 del chunks[-1]
1256
1256
1257 while chunks:
1257 while chunks:
1258 l = colwidth(chunks[-1])
1258 l = colwidth(chunks[-1])
1259
1259
1260 # Can at least squeeze this chunk onto the current line.
1260 # Can at least squeeze this chunk onto the current line.
1261 if cur_len + l <= width:
1261 if cur_len + l <= width:
1262 cur_line.append(chunks.pop())
1262 cur_line.append(chunks.pop())
1263 cur_len += l
1263 cur_len += l
1264
1264
1265 # Nope, this line is full.
1265 # Nope, this line is full.
1266 else:
1266 else:
1267 break
1267 break
1268
1268
1269 # The current line is full, and the next chunk is too big to
1269 # The current line is full, and the next chunk is too big to
1270 # fit on *any* line (not just this one).
1270 # fit on *any* line (not just this one).
1271 if chunks and colwidth(chunks[-1]) > width:
1271 if chunks and colwidth(chunks[-1]) > width:
1272 self._handle_long_word(chunks, cur_line, cur_len, width)
1272 self._handle_long_word(chunks, cur_line, cur_len, width)
1273
1273
1274 # If the last chunk on this line is all whitespace, drop it.
1274 # If the last chunk on this line is all whitespace, drop it.
1275 if (self.drop_whitespace and
1275 if (self.drop_whitespace and
1276 cur_line and cur_line[-1].strip() == ''):
1276 cur_line and cur_line[-1].strip() == ''):
1277 del cur_line[-1]
1277 del cur_line[-1]
1278
1278
1279 # Convert current line back to a string and store it in list
1279 # Convert current line back to a string and store it in list
1280 # of all lines (return value).
1280 # of all lines (return value).
1281 if cur_line:
1281 if cur_line:
1282 lines.append(indent + ''.join(cur_line))
1282 lines.append(indent + ''.join(cur_line))
1283
1283
1284 return lines
1284 return lines
1285
1285
1286 global MBTextWrapper
1286 global MBTextWrapper
1287 MBTextWrapper = tw
1287 MBTextWrapper = tw
1288 return tw(**kwargs)
1288 return tw(**kwargs)
1289
1289
def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    line = line.decode(encoding.encoding, encoding.encodingmode)
    initindent = initindent.decode(encoding.encoding, encoding.encodingmode)
    hangindent = hangindent.decode(encoding.encoding, encoding.encodingmode)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line).encode(encoding.encoding)

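# Editor's sketch (not part of util.py): a typical call to wrap() above.
# The sample text, width, and indents are assumptions for illustration;
# wrap() takes byte strings in the local encoding and returns wrapped bytes.
def _demo_wrap():
    text = 'a fairly long sentence that should be folded onto several lines'
    print wrap(text, width=30, initindent='* ', hangindent='  ')
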
def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if mainfrozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # The Windows case is easier because the child process either starts
    # successfully and validates the condition, or exits on failure. We
    # just poll on its PID. On Unix, if the child process fails to start,
    # it is left in a zombie state until the parent waits on it, which we
    # cannot do since we expect a long-running process on success.
    # Instead we listen for SIGCHLD telling us our child process
    # terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    SIGCHLD = getattr(signal, 'SIGCHLD', None)
    if SIGCHLD is not None:
        prevhandler = signal.signal(SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)

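# Editor's sketch (not part of util.py): a hypothetical condfn for
# rundetached(), polling for a pid file that the detached command is
# expected to create once it is ready. The file name and command line
# are assumptions for illustration only.
def _demo_rundetached():
    pidfile = '/tmp/demo-hg-serve.pid'
    def started():
        return os.path.exists(pidfile)
    return rundetached(['hg', 'serve', '--pid-file', pidfile], started)
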
try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)

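# Editor's sketch (not part of util.py): interpolate() replaces mapping
# keys that follow the prefix; with escape_prefix=True a doubled prefix
# collapses to a literal one. The mappings and strings are made up.
def _demo_interpolate():
    assert interpolate('%', {'user': 'joe'}, 'hi %user') == 'hi joe'
    assert interpolate(r'\$', {'rev': '42'}, 'rev $rev costs $$5',
                       escape_prefix=True) == 'rev 42 costs $5'
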
def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)

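# Editor's sketch (not part of util.py): getport() accepts numeric ports
# and service names; 'http' resolving to 80 assumes a standard services
# database on the host.
def _demo_getport():
    assert getport('8080') == 8080
    assert getport('http') == 80  # via socket.getservbyname()
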
_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)

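# Editor's sketch (not part of util.py): parsebool() is case-insensitive
# and returns None (rather than False) for unrecognized input, so callers
# can tell "explicitly false" apart from "not a boolean at all".
def _demo_parsebool():
    assert parsebool('Yes') is True
    assert parsebool('off') is False
    assert parsebool('maybe') is None
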
_hexdig = '0123456789ABCDEFabcdef'
_hextochr = dict((a + b, chr(int(a + b, 16)))
                 for a in _hexdig for b in _hexdig)

def _urlunquote(s):
    """unquote('abc%20def') -> 'abc def'."""
    res = s.split('%')
    # fastpath
    if len(res) == 1:
        return s
    s = res[0]
    for item in res[1:]:
        try:
            s += _hextochr[item[:2]] + item[2:]
        except KeyError:
            s += '%' + item
        except UnicodeDecodeError:
            s += unichr(int(item[:2], 16)) + item[2:]
    return s

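# Editor's sketch (not part of util.py): _urlunquote() decodes %XX escapes
# and leaves malformed escapes untouched. The inputs are made up.
def _demo_urlunquote():
    assert _urlunquote('abc%20def') == 'abc def'
    assert _urlunquote('100%25') == '100%'
    assert _urlunquote('broken%zz') == 'broken%zz'
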
class url(object):
    r"""Reliable URL parser.

    This parses URLs and provides attributes for the following
    components:

    <scheme>://<user>:<passwd>@<host>:<port>/<path>?<query>#<fragment>

    Missing components are set to None. The only exception is
    fragment, which is set to '' if present but empty.

    If parsefragment is False, fragment is included in query. If
    parsequery is False, query is included in path. If both are
    False, both fragment and query are included in path.

    See http://www.ietf.org/rfc/rfc2396.txt for more information.

    Note that for backward compatibility reasons, bundle URLs do not
    take host names. That means 'bundle://../' has a path of '../'.

    Examples:

    >>> url('http://www.ietf.org/rfc/rfc2396.txt')
    <url scheme: 'http', host: 'www.ietf.org', path: 'rfc/rfc2396.txt'>
    >>> url('ssh://[::1]:2200//home/joe/repo')
    <url scheme: 'ssh', host: '[::1]', port: '2200', path: '/home/joe/repo'>
    >>> url('file:///home/joe/repo')
    <url scheme: 'file', path: '/home/joe/repo'>
    >>> url('file:///c:/temp/foo/')
    <url scheme: 'file', path: 'c:/temp/foo/'>
    >>> url('bundle:foo')
    <url scheme: 'bundle', path: 'foo'>
    >>> url('bundle://../foo')
    <url scheme: 'bundle', path: '../foo'>
    >>> url(r'c:\foo\bar')
    <url path: 'c:\\foo\\bar'>
    >>> url(r'\\blah\blah\blah')
    <url path: '\\\\blah\\blah\\blah'>
    >>> url(r'\\blah\blah\blah#baz')
    <url path: '\\\\blah\\blah\\blah', fragment: 'baz'>

    Authentication credentials:

    >>> url('ssh://joe:xyz@x/repo')
    <url scheme: 'ssh', user: 'joe', passwd: 'xyz', host: 'x', path: 'repo'>
    >>> url('ssh://joe@x/repo')
    <url scheme: 'ssh', user: 'joe', host: 'x', path: 'repo'>

    Query strings and fragments:

    >>> url('http://host/a?b#c')
    <url scheme: 'http', host: 'host', path: 'a', query: 'b', fragment: 'c'>
    >>> url('http://host/a?b#c', parsequery=False, parsefragment=False)
    <url scheme: 'http', host: 'host', path: 'a?b#c'>
    """

    _safechars = "!~*'()+"
    _safepchars = "/!~*'()+"
    _matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match

    def __init__(self, path, parsequery=True, parsefragment=True):
        # We slowly chomp away at path until we have only the path left
        self.scheme = self.user = self.passwd = self.host = None
        self.port = self.path = self.query = self.fragment = None
        self._localpath = True
        self._hostport = ''
        self._origpath = path

        if parsefragment and '#' in path:
            path, self.fragment = path.split('#', 1)
            if not path:
                path = None

        # special case for Windows drive letters and UNC paths
        if hasdriveletter(path) or path.startswith(r'\\'):
            self.path = path
            return

        # For compatibility reasons, we can't handle bundle paths as
        # normal URLS
        if path.startswith('bundle:'):
            self.scheme = 'bundle'
            path = path[7:]
            if path.startswith('//'):
                path = path[2:]
            self.path = path
            return

        if self._matchscheme(path):
            parts = path.split(':', 1)
            if parts[0]:
                self.scheme, path = parts
                self._localpath = False

        if not path:
            path = None
            if self._localpath:
                self.path = ''
                return
        else:
            if self._localpath:
                self.path = path
                return

        if parsequery and '?' in path:
            path, self.query = path.split('?', 1)
            if not path:
                path = None
            if not self.query:
                self.query = None

        # // is required to specify a host/authority
        if path and path.startswith('//'):
            parts = path[2:].split('/', 1)
            if len(parts) > 1:
                self.host, path = parts
            else:
                self.host = parts[0]
                path = None
            if not self.host:
                self.host = None
                # path of file:///d is /d
                # path of file:///d:/ is d:/, not /d:/
                if path and not hasdriveletter(path):
                    path = '/' + path

        if self.host and '@' in self.host:
            self.user, self.host = self.host.rsplit('@', 1)
            if ':' in self.user:
                self.user, self.passwd = self.user.split(':', 1)
            if not self.host:
                self.host = None

        # Don't split on colons in IPv6 addresses without ports
        if (self.host and ':' in self.host and
            not (self.host.startswith('[') and self.host.endswith(']'))):
            self._hostport = self.host
            self.host, self.port = self.host.rsplit(':', 1)
            if not self.host:
                self.host = None

        if (self.host and self.scheme == 'file' and
            self.host not in ('localhost', '127.0.0.1', '[::1]')):
            raise Abort(_('file:// URLs can only refer to localhost'))

        self.path = path

        # leave the query string escaped
        for a in ('user', 'passwd', 'host', 'port',
                  'path', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                setattr(self, a, _urlunquote(v))

    def __repr__(self):
        attrs = []
        for a in ('scheme', 'user', 'passwd', 'host', 'port', 'path',
                  'query', 'fragment'):
            v = getattr(self, a)
            if v is not None:
                attrs.append('%s: %r' % (a, v))
        return '<url %s>' % ', '.join(attrs)

    def __str__(self):
        r"""Join the URL's components back into a URL string.

        Examples:

        >>> str(url('http://user:pw@host:80/?foo#bar'))
        'http://user:pw@host:80/?foo#bar'
        >>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
        'http://user:pw@host:80/?foo=bar&baz=42'
        >>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
        'http://user:pw@host:80/?foo=bar%3dbaz'
        >>> str(url('ssh://user:pw@[::1]:2200//home/joe#'))
        'ssh://user:pw@[::1]:2200//home/joe#'
        >>> str(url('http://localhost:80//'))
        'http://localhost:80//'
        >>> str(url('http://localhost:80/'))
        'http://localhost:80/'
        >>> str(url('http://localhost:80'))
        'http://localhost:80/'
        >>> str(url('bundle:foo'))
        'bundle:foo'
        >>> str(url('bundle://../foo'))
        'bundle:../foo'
        >>> str(url('path'))
        'path'
        >>> str(url('file:///tmp/foo/bar'))
        'file:///tmp/foo/bar'
        >>> str(url('file:///c:/tmp/foo/bar'))
        'file:///c%3A/tmp/foo/bar'
        >>> print url(r'bundle:foo\bar')
        bundle:foo\bar
        """
        if self._localpath:
            s = self.path
            if self.scheme == 'bundle':
                s = 'bundle:' + s
            if self.fragment:
                s += '#' + self.fragment
            return s

        s = self.scheme + ':'
        if self.user or self.passwd or self.host:
            s += '//'
        elif self.scheme and (not self.path or self.path.startswith('/')
                              or hasdriveletter(self.path)):
            s += '//'
            if hasdriveletter(self.path):
                s += '/'
        if self.user:
            s += urllib.quote(self.user, safe=self._safechars)
        if self.passwd:
            s += ':' + urllib.quote(self.passwd, safe=self._safechars)
        if self.user or self.passwd:
            s += '@'
        if self.host:
            if not (self.host.startswith('[') and self.host.endswith(']')):
                s += urllib.quote(self.host)
            else:
                s += self.host
        if self.port:
            s += ':' + urllib.quote(self.port)
        if self.host:
            s += '/'
        if self.path:
            # TODO: similar to the query string, we should not unescape the
            # path when we store it, the path might contain '%2f' = '/',
            # which we should *not* escape.
            s += urllib.quote(self.path, safe=self._safepchars)
        if self.query:
            # we store the query in escaped form.
            s += '?' + self.query
        if self.fragment is not None:
            s += '#' + urllib.quote(self.fragment, safe=self._safepchars)
        return s

    def authinfo(self):
        user, passwd = self.user, self.passwd
        try:
            self.user, self.passwd = None, None
            s = str(self)
        finally:
            self.user, self.passwd = user, passwd
        if not self.user:
            return (s, None)
        # authinfo[1] is passed to urllib2 password manager, and its
        # URIs must not contain credentials. The host is passed in the
        # URIs list because Python < 2.4.3 uses only that to search for
        # a password.
        return (s, (None, (s, self.host),
                    self.user, self.passwd or ''))

    def isabs(self):
        if self.scheme and self.scheme != 'file':
            return True # remote URL
        if hasdriveletter(self.path):
            return True # absolute for our purposes - can't be joined()
        if self.path.startswith(r'\\'):
            return True # Windows UNC path
        if self.path.startswith('/'):
            return True # POSIX-style
        return False

    def localpath(self):
        if self.scheme == 'file' or self.scheme == 'bundle':
            path = self.path or '/'
            # For Windows, we need to promote hosts containing drive
            # letters to paths with drive letters.
            if hasdriveletter(self._hostport):
                path = self._hostport + '/' + self.path
            elif (self.host is not None and self.path
                  and not hasdriveletter(path)):
                path = '/' + path
            return path
        return self._origpath

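# Editor's sketch (not part of util.py): besides the doctests in the class
# above, localpath() and authinfo() are common entry points. The URLs and
# expected values below are assumptions derived from the code above.
def _demo_url():
    assert url('file:///tmp/repo').localpath() == '/tmp/repo'
    assert url('bundle:foo').localpath() == 'foo'
    s, auth = url('https://joe:xyz@example.com/repo').authinfo()
    assert s == 'https://example.com/repo'
    assert auth == (None, (s, 'example.com'), 'joe', 'xyz')
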
def hasscheme(path):
    return bool(url(path).scheme)

def hasdriveletter(path):
    return path and path[1:2] == ':' and path[0:1].isalpha()

def urllocalpath(path):
    return url(path, parsequery=False, parsefragment=False).localpath()

1734 '''hide user credential in a url string'''
1734 '''hide user credential in a url string'''
1735 u = url(u)
1735 u = url(u)
1736 if u.passwd:
1736 if u.passwd:
1737 u.passwd = '***'
1737 u.passwd = '***'
1738 return str(u)
1738 return str(u)
1739
1739
1740 def removeauth(u):
1740 def removeauth(u):
1741 '''remove all authentication information from a url string'''
1741 '''remove all authentication information from a url string'''
1742 u = url(u)
1742 u = url(u)
1743 u.user = u.passwd = None
1743 u.user = u.passwd = None
1744 return str(u)
1744 return str(u)
1745
1745
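# Editor's sketch (not part of util.py): hidepassword() masks only the
# password while removeauth() drops both user and password; the URL is
# made up for illustration.
def _demo_hidecredentials():
    u = 'http://joe:secret@example.com/repo'
    assert hidepassword(u) == 'http://joe:***@example.com/repo'
    assert removeauth(u) == 'http://example.com/repo'
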
def isatty(fd):
    try:
        return fd.isatty()
    except AttributeError:
        return False