move checkfilename from util to scmutil...
Adrian Buehlmann
r13974:23f2736a default
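This changeset moves checkfilename() from util into scmutil and points its callers (dirstate._addpath and scmutil.checkportable) at the new home. A minimal usage sketch of the relocated helper, assuming a checkout that includes this revision; the filename below is made up:

    from mercurial import scmutil, util

    try:
        # '\n' and '\r' are disallowed in tracked filenames
        scmutil.checkfilename('bad\nname.txt')
    except util.Abort, inst:
        print 'rejected: %s' % inst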
@@ -1,724 +1,724 @@
1 # dirstate.py - working directory tracking for mercurial
1 # dirstate.py - working directory tracking for mercurial
2 #
2 #
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
3 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from node import nullid
8 from node import nullid
9 from i18n import _
9 from i18n import _
10 import util, ignore, osutil, parsers, encoding
10 import scmutil, util, ignore, osutil, parsers, encoding
11 import struct, os, stat, errno
11 import struct, os, stat, errno
12 import cStringIO
12 import cStringIO
13
13
14 _format = ">cllll"
14 _format = ">cllll"
15 propertycache = util.propertycache
15 propertycache = util.propertycache
16
16
17 def _finddirs(path):
17 def _finddirs(path):
18 pos = path.rfind('/')
18 pos = path.rfind('/')
19 while pos != -1:
19 while pos != -1:
20 yield path[:pos]
20 yield path[:pos]
21 pos = path.rfind('/', 0, pos)
21 pos = path.rfind('/', 0, pos)
22
22
23 def _incdirs(dirs, path):
23 def _incdirs(dirs, path):
24 for base in _finddirs(path):
24 for base in _finddirs(path):
25 if base in dirs:
25 if base in dirs:
26 dirs[base] += 1
26 dirs[base] += 1
27 return
27 return
28 dirs[base] = 1
28 dirs[base] = 1
29
29
30 def _decdirs(dirs, path):
30 def _decdirs(dirs, path):
31 for base in _finddirs(path):
31 for base in _finddirs(path):
32 if dirs[base] > 1:
32 if dirs[base] > 1:
33 dirs[base] -= 1
33 dirs[base] -= 1
34 return
34 return
35 del dirs[base]
35 del dirs[base]
36
36
37 class dirstate(object):
37 class dirstate(object):
38
38
39 def __init__(self, opener, ui, root, validate):
39 def __init__(self, opener, ui, root, validate):
40 '''Create a new dirstate object.
40 '''Create a new dirstate object.
41
41
42 opener is an open()-like callable that can be used to open the
42 opener is an open()-like callable that can be used to open the
43 dirstate file; root is the root of the directory tracked by
43 dirstate file; root is the root of the directory tracked by
44 the dirstate.
44 the dirstate.
45 '''
45 '''
46 self._opener = opener
46 self._opener = opener
47 self._validate = validate
47 self._validate = validate
48 self._root = root
48 self._root = root
49 self._rootdir = os.path.join(root, '')
49 self._rootdir = os.path.join(root, '')
50 self._dirty = False
50 self._dirty = False
51 self._dirtypl = False
51 self._dirtypl = False
52 self._lastnormaltime = None
52 self._lastnormaltime = None
53 self._ui = ui
53 self._ui = ui
54
54
55 @propertycache
55 @propertycache
56 def _map(self):
56 def _map(self):
57 '''Return the dirstate contents as a map from filename to
57 '''Return the dirstate contents as a map from filename to
58 (state, mode, size, time).'''
58 (state, mode, size, time).'''
59 self._read()
59 self._read()
60 return self._map
60 return self._map
61
61
62 @propertycache
62 @propertycache
63 def _copymap(self):
63 def _copymap(self):
64 self._read()
64 self._read()
65 return self._copymap
65 return self._copymap
66
66
67 @propertycache
67 @propertycache
68 def _foldmap(self):
68 def _foldmap(self):
69 f = {}
69 f = {}
70 for name in self._map:
70 for name in self._map:
71 f[os.path.normcase(name)] = name
71 f[os.path.normcase(name)] = name
72 return f
72 return f
73
73
74 @propertycache
74 @propertycache
75 def _branch(self):
75 def _branch(self):
76 try:
76 try:
77 return self._opener("branch").read().strip() or "default"
77 return self._opener("branch").read().strip() or "default"
78 except IOError:
78 except IOError:
79 return "default"
79 return "default"
80
80
81 @propertycache
81 @propertycache
82 def _pl(self):
82 def _pl(self):
83 try:
83 try:
84 fp = self._opener("dirstate")
84 fp = self._opener("dirstate")
85 st = fp.read(40)
85 st = fp.read(40)
86 fp.close()
86 fp.close()
87 l = len(st)
87 l = len(st)
88 if l == 40:
88 if l == 40:
89 return st[:20], st[20:40]
89 return st[:20], st[20:40]
90 elif l > 0 and l < 40:
90 elif l > 0 and l < 40:
91 raise util.Abort(_('working directory state appears damaged!'))
91 raise util.Abort(_('working directory state appears damaged!'))
92 except IOError, err:
92 except IOError, err:
93 if err.errno != errno.ENOENT:
93 if err.errno != errno.ENOENT:
94 raise
94 raise
95 return [nullid, nullid]
95 return [nullid, nullid]
96
96
97 @propertycache
97 @propertycache
98 def _dirs(self):
98 def _dirs(self):
99 dirs = {}
99 dirs = {}
100 for f, s in self._map.iteritems():
100 for f, s in self._map.iteritems():
101 if s[0] != 'r':
101 if s[0] != 'r':
102 _incdirs(dirs, f)
102 _incdirs(dirs, f)
103 return dirs
103 return dirs
104
104
105 @propertycache
105 @propertycache
106 def _ignore(self):
106 def _ignore(self):
107 files = [self._join('.hgignore')]
107 files = [self._join('.hgignore')]
108 for name, path in self._ui.configitems("ui"):
108 for name, path in self._ui.configitems("ui"):
109 if name == 'ignore' or name.startswith('ignore.'):
109 if name == 'ignore' or name.startswith('ignore.'):
110 files.append(util.expandpath(path))
110 files.append(util.expandpath(path))
111 return ignore.ignore(self._root, files, self._ui.warn)
111 return ignore.ignore(self._root, files, self._ui.warn)
112
112
113 @propertycache
113 @propertycache
114 def _slash(self):
114 def _slash(self):
115 return self._ui.configbool('ui', 'slash') and os.sep != '/'
115 return self._ui.configbool('ui', 'slash') and os.sep != '/'
116
116
117 @propertycache
117 @propertycache
118 def _checklink(self):
118 def _checklink(self):
119 return util.checklink(self._root)
119 return util.checklink(self._root)
120
120
121 @propertycache
121 @propertycache
122 def _checkexec(self):
122 def _checkexec(self):
123 return util.checkexec(self._root)
123 return util.checkexec(self._root)
124
124
125 @propertycache
125 @propertycache
126 def _checkcase(self):
126 def _checkcase(self):
127 return not util.checkcase(self._join('.hg'))
127 return not util.checkcase(self._join('.hg'))
128
128
129 def _join(self, f):
129 def _join(self, f):
130 # much faster than os.path.join()
130 # much faster than os.path.join()
131 # it's safe because f is always a relative path
131 # it's safe because f is always a relative path
132 return self._rootdir + f
132 return self._rootdir + f
133
133
134 def flagfunc(self, fallback):
134 def flagfunc(self, fallback):
135 if self._checklink:
135 if self._checklink:
136 if self._checkexec:
136 if self._checkexec:
137 def f(x):
137 def f(x):
138 p = self._join(x)
138 p = self._join(x)
139 if os.path.islink(p):
139 if os.path.islink(p):
140 return 'l'
140 return 'l'
141 if util.is_exec(p):
141 if util.is_exec(p):
142 return 'x'
142 return 'x'
143 return ''
143 return ''
144 return f
144 return f
145 def f(x):
145 def f(x):
146 if os.path.islink(self._join(x)):
146 if os.path.islink(self._join(x)):
147 return 'l'
147 return 'l'
148 if 'x' in fallback(x):
148 if 'x' in fallback(x):
149 return 'x'
149 return 'x'
150 return ''
150 return ''
151 return f
151 return f
152 if self._checkexec:
152 if self._checkexec:
153 def f(x):
153 def f(x):
154 if 'l' in fallback(x):
154 if 'l' in fallback(x):
155 return 'l'
155 return 'l'
156 if util.is_exec(self._join(x)):
156 if util.is_exec(self._join(x)):
157 return 'x'
157 return 'x'
158 return ''
158 return ''
159 return f
159 return f
160 return fallback
160 return fallback
161
161
162 def getcwd(self):
162 def getcwd(self):
163 cwd = os.getcwd()
163 cwd = os.getcwd()
164 if cwd == self._root:
164 if cwd == self._root:
165 return ''
165 return ''
166 # self._root ends with a path separator if self._root is '/' or 'C:\'
166 # self._root ends with a path separator if self._root is '/' or 'C:\'
167 rootsep = self._root
167 rootsep = self._root
168 if not util.endswithsep(rootsep):
168 if not util.endswithsep(rootsep):
169 rootsep += os.sep
169 rootsep += os.sep
170 if cwd.startswith(rootsep):
170 if cwd.startswith(rootsep):
171 return cwd[len(rootsep):]
171 return cwd[len(rootsep):]
172 else:
172 else:
173 # we're outside the repo. return an absolute path.
173 # we're outside the repo. return an absolute path.
174 return cwd
174 return cwd
175
175
176 def pathto(self, f, cwd=None):
176 def pathto(self, f, cwd=None):
177 if cwd is None:
177 if cwd is None:
178 cwd = self.getcwd()
178 cwd = self.getcwd()
179 path = util.pathto(self._root, cwd, f)
179 path = util.pathto(self._root, cwd, f)
180 if self._slash:
180 if self._slash:
181 return util.normpath(path)
181 return util.normpath(path)
182 return path
182 return path
183
183
184 def __getitem__(self, key):
184 def __getitem__(self, key):
185 '''Return the current state of key (a filename) in the dirstate.
185 '''Return the current state of key (a filename) in the dirstate.
186
186
187 States are:
187 States are:
188 n normal
188 n normal
189 m needs merging
189 m needs merging
190 r marked for removal
190 r marked for removal
191 a marked for addition
191 a marked for addition
192 ? not tracked
192 ? not tracked
193 '''
193 '''
194 return self._map.get(key, ("?",))[0]
194 return self._map.get(key, ("?",))[0]
195
195
196 def __contains__(self, key):
196 def __contains__(self, key):
197 return key in self._map
197 return key in self._map
198
198
199 def __iter__(self):
199 def __iter__(self):
200 for x in sorted(self._map):
200 for x in sorted(self._map):
201 yield x
201 yield x
202
202
203 def parents(self):
203 def parents(self):
204 return [self._validate(p) for p in self._pl]
204 return [self._validate(p) for p in self._pl]
205
205
206 def p1(self):
206 def p1(self):
207 return self._validate(self._pl[0])
207 return self._validate(self._pl[0])
208
208
209 def p2(self):
209 def p2(self):
210 return self._validate(self._pl[1])
210 return self._validate(self._pl[1])
211
211
212 def branch(self):
212 def branch(self):
213 return encoding.tolocal(self._branch)
213 return encoding.tolocal(self._branch)
214
214
215 def setparents(self, p1, p2=nullid):
215 def setparents(self, p1, p2=nullid):
216 self._dirty = self._dirtypl = True
216 self._dirty = self._dirtypl = True
217 self._pl = p1, p2
217 self._pl = p1, p2
218
218
219 def setbranch(self, branch):
219 def setbranch(self, branch):
220 if branch in ['tip', '.', 'null']:
220 if branch in ['tip', '.', 'null']:
221 raise util.Abort(_('the name \'%s\' is reserved') % branch)
221 raise util.Abort(_('the name \'%s\' is reserved') % branch)
222 self._branch = encoding.fromlocal(branch)
222 self._branch = encoding.fromlocal(branch)
223 self._opener("branch", "w").write(self._branch + '\n')
223 self._opener("branch", "w").write(self._branch + '\n')
224
224
225 def _read(self):
225 def _read(self):
226 self._map = {}
226 self._map = {}
227 self._copymap = {}
227 self._copymap = {}
228 try:
228 try:
229 st = self._opener("dirstate").read()
229 st = self._opener("dirstate").read()
230 except IOError, err:
230 except IOError, err:
231 if err.errno != errno.ENOENT:
231 if err.errno != errno.ENOENT:
232 raise
232 raise
233 return
233 return
234 if not st:
234 if not st:
235 return
235 return
236
236
237 p = parsers.parse_dirstate(self._map, self._copymap, st)
237 p = parsers.parse_dirstate(self._map, self._copymap, st)
238 if not self._dirtypl:
238 if not self._dirtypl:
239 self._pl = p
239 self._pl = p
240
240
241 def invalidate(self):
241 def invalidate(self):
242 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
242 for a in ("_map", "_copymap", "_foldmap", "_branch", "_pl", "_dirs",
243 "_ignore"):
243 "_ignore"):
244 if a in self.__dict__:
244 if a in self.__dict__:
245 delattr(self, a)
245 delattr(self, a)
246 self._lastnormaltime = None
246 self._lastnormaltime = None
247 self._dirty = False
247 self._dirty = False
248
248
249 def copy(self, source, dest):
249 def copy(self, source, dest):
250 """Mark dest as a copy of source. Unmark dest if source is None."""
250 """Mark dest as a copy of source. Unmark dest if source is None."""
251 if source == dest:
251 if source == dest:
252 return
252 return
253 self._dirty = True
253 self._dirty = True
254 if source is not None:
254 if source is not None:
255 self._copymap[dest] = source
255 self._copymap[dest] = source
256 elif dest in self._copymap:
256 elif dest in self._copymap:
257 del self._copymap[dest]
257 del self._copymap[dest]
258
258
259 def copied(self, file):
259 def copied(self, file):
260 return self._copymap.get(file, None)
260 return self._copymap.get(file, None)
261
261
262 def copies(self):
262 def copies(self):
263 return self._copymap
263 return self._copymap
264
264
265 def _droppath(self, f):
265 def _droppath(self, f):
266 if self[f] not in "?r" and "_dirs" in self.__dict__:
266 if self[f] not in "?r" and "_dirs" in self.__dict__:
267 _decdirs(self._dirs, f)
267 _decdirs(self._dirs, f)
268
268
269 def _addpath(self, f, check=False):
269 def _addpath(self, f, check=False):
270 oldstate = self[f]
270 oldstate = self[f]
271 if check or oldstate == "r":
271 if check or oldstate == "r":
272 util.checkfilename(f)
272 scmutil.checkfilename(f)
273 if f in self._dirs:
273 if f in self._dirs:
274 raise util.Abort(_('directory %r already in dirstate') % f)
274 raise util.Abort(_('directory %r already in dirstate') % f)
275 # shadows
275 # shadows
276 for d in _finddirs(f):
276 for d in _finddirs(f):
277 if d in self._dirs:
277 if d in self._dirs:
278 break
278 break
279 if d in self._map and self[d] != 'r':
279 if d in self._map and self[d] != 'r':
280 raise util.Abort(
280 raise util.Abort(
281 _('file %r in dirstate clashes with %r') % (d, f))
281 _('file %r in dirstate clashes with %r') % (d, f))
282 if oldstate in "?r" and "_dirs" in self.__dict__:
282 if oldstate in "?r" and "_dirs" in self.__dict__:
283 _incdirs(self._dirs, f)
283 _incdirs(self._dirs, f)
284
284
285 def normal(self, f):
285 def normal(self, f):
286 '''Mark a file normal and clean.'''
286 '''Mark a file normal and clean.'''
287 self._dirty = True
287 self._dirty = True
288 self._addpath(f)
288 self._addpath(f)
289 s = os.lstat(self._join(f))
289 s = os.lstat(self._join(f))
290 mtime = int(s.st_mtime)
290 mtime = int(s.st_mtime)
291 self._map[f] = ('n', s.st_mode, s.st_size, mtime)
291 self._map[f] = ('n', s.st_mode, s.st_size, mtime)
292 if f in self._copymap:
292 if f in self._copymap:
293 del self._copymap[f]
293 del self._copymap[f]
294 if mtime > self._lastnormaltime:
294 if mtime > self._lastnormaltime:
295 # Remember the most recent modification timeslot for status(),
295 # Remember the most recent modification timeslot for status(),
296 # to make sure we won't miss future size-preserving file content
296 # to make sure we won't miss future size-preserving file content
297 # modifications that happen within the same timeslot.
297 # modifications that happen within the same timeslot.
298 self._lastnormaltime = mtime
298 self._lastnormaltime = mtime
299
299
300 def normallookup(self, f):
300 def normallookup(self, f):
301 '''Mark a file normal, but possibly dirty.'''
301 '''Mark a file normal, but possibly dirty.'''
302 if self._pl[1] != nullid and f in self._map:
302 if self._pl[1] != nullid and f in self._map:
303 # if there is a merge going on and the file was either
303 # if there is a merge going on and the file was either
304 # in state 'm' (-1) or coming from other parent (-2) before
304 # in state 'm' (-1) or coming from other parent (-2) before
305 # being removed, restore that state.
305 # being removed, restore that state.
306 entry = self._map[f]
306 entry = self._map[f]
307 if entry[0] == 'r' and entry[2] in (-1, -2):
307 if entry[0] == 'r' and entry[2] in (-1, -2):
308 source = self._copymap.get(f)
308 source = self._copymap.get(f)
309 if entry[2] == -1:
309 if entry[2] == -1:
310 self.merge(f)
310 self.merge(f)
311 elif entry[2] == -2:
311 elif entry[2] == -2:
312 self.otherparent(f)
312 self.otherparent(f)
313 if source:
313 if source:
314 self.copy(source, f)
314 self.copy(source, f)
315 return
315 return
316 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
316 if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
317 return
317 return
318 self._dirty = True
318 self._dirty = True
319 self._addpath(f)
319 self._addpath(f)
320 self._map[f] = ('n', 0, -1, -1)
320 self._map[f] = ('n', 0, -1, -1)
321 if f in self._copymap:
321 if f in self._copymap:
322 del self._copymap[f]
322 del self._copymap[f]
323
323
324 def otherparent(self, f):
324 def otherparent(self, f):
325 '''Mark as coming from the other parent, always dirty.'''
325 '''Mark as coming from the other parent, always dirty.'''
326 if self._pl[1] == nullid:
326 if self._pl[1] == nullid:
327 raise util.Abort(_("setting %r to other parent "
327 raise util.Abort(_("setting %r to other parent "
328 "only allowed in merges") % f)
328 "only allowed in merges") % f)
329 self._dirty = True
329 self._dirty = True
330 self._addpath(f)
330 self._addpath(f)
331 self._map[f] = ('n', 0, -2, -1)
331 self._map[f] = ('n', 0, -2, -1)
332 if f in self._copymap:
332 if f in self._copymap:
333 del self._copymap[f]
333 del self._copymap[f]
334
334
335 def add(self, f):
335 def add(self, f):
336 '''Mark a file added.'''
336 '''Mark a file added.'''
337 self._dirty = True
337 self._dirty = True
338 self._addpath(f, True)
338 self._addpath(f, True)
339 self._map[f] = ('a', 0, -1, -1)
339 self._map[f] = ('a', 0, -1, -1)
340 if f in self._copymap:
340 if f in self._copymap:
341 del self._copymap[f]
341 del self._copymap[f]
342
342
343 def remove(self, f):
343 def remove(self, f):
344 '''Mark a file removed.'''
344 '''Mark a file removed.'''
345 self._dirty = True
345 self._dirty = True
346 self._droppath(f)
346 self._droppath(f)
347 size = 0
347 size = 0
348 if self._pl[1] != nullid and f in self._map:
348 if self._pl[1] != nullid and f in self._map:
349 # backup the previous state
349 # backup the previous state
350 entry = self._map[f]
350 entry = self._map[f]
351 if entry[0] == 'm': # merge
351 if entry[0] == 'm': # merge
352 size = -1
352 size = -1
353 elif entry[0] == 'n' and entry[2] == -2: # other parent
353 elif entry[0] == 'n' and entry[2] == -2: # other parent
354 size = -2
354 size = -2
355 self._map[f] = ('r', 0, size, 0)
355 self._map[f] = ('r', 0, size, 0)
356 if size == 0 and f in self._copymap:
356 if size == 0 and f in self._copymap:
357 del self._copymap[f]
357 del self._copymap[f]
358
358
359 def merge(self, f):
359 def merge(self, f):
360 '''Mark a file merged.'''
360 '''Mark a file merged.'''
361 self._dirty = True
361 self._dirty = True
362 s = os.lstat(self._join(f))
362 s = os.lstat(self._join(f))
363 self._addpath(f)
363 self._addpath(f)
364 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
364 self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
365 if f in self._copymap:
365 if f in self._copymap:
366 del self._copymap[f]
366 del self._copymap[f]
367
367
368 def forget(self, f):
368 def forget(self, f):
369 '''Forget a file.'''
369 '''Forget a file.'''
370 self._dirty = True
370 self._dirty = True
371 try:
371 try:
372 self._droppath(f)
372 self._droppath(f)
373 del self._map[f]
373 del self._map[f]
374 except KeyError:
374 except KeyError:
375 self._ui.warn(_("not in dirstate: %s\n") % f)
375 self._ui.warn(_("not in dirstate: %s\n") % f)
376
376
377 def _normalize(self, path, isknown):
377 def _normalize(self, path, isknown):
378 normed = os.path.normcase(path)
378 normed = os.path.normcase(path)
379 folded = self._foldmap.get(normed, None)
379 folded = self._foldmap.get(normed, None)
380 if folded is None:
380 if folded is None:
381 if isknown or not os.path.lexists(os.path.join(self._root, path)):
381 if isknown or not os.path.lexists(os.path.join(self._root, path)):
382 folded = path
382 folded = path
383 else:
383 else:
384 folded = self._foldmap.setdefault(normed,
384 folded = self._foldmap.setdefault(normed,
385 util.fspath(path, self._root))
385 util.fspath(path, self._root))
386 return folded
386 return folded
387
387
388 def normalize(self, path, isknown=False):
388 def normalize(self, path, isknown=False):
389 '''
389 '''
390 normalize the case of a pathname when on a casefolding filesystem
390 normalize the case of a pathname when on a casefolding filesystem
391
391
392 isknown specifies whether the filename came from walking the
392 isknown specifies whether the filename came from walking the
393 disk, to avoid extra filesystem access
393 disk, to avoid extra filesystem access
394
394
395 The normalized case is determined based on the following precedence:
395 The normalized case is determined based on the following precedence:
396
396
397 - version of name already stored in the dirstate
397 - version of name already stored in the dirstate
398 - version of name stored on disk
398 - version of name stored on disk
399 - version provided via command arguments
399 - version provided via command arguments
400 '''
400 '''
401
401
402 if self._checkcase:
402 if self._checkcase:
403 return self._normalize(path, isknown)
403 return self._normalize(path, isknown)
404 return path
404 return path
405
405
406 def clear(self):
406 def clear(self):
407 self._map = {}
407 self._map = {}
408 if "_dirs" in self.__dict__:
408 if "_dirs" in self.__dict__:
409 delattr(self, "_dirs")
409 delattr(self, "_dirs")
410 self._copymap = {}
410 self._copymap = {}
411 self._pl = [nullid, nullid]
411 self._pl = [nullid, nullid]
412 self._lastnormaltime = None
412 self._lastnormaltime = None
413 self._dirty = True
413 self._dirty = True
414
414
415 def rebuild(self, parent, files):
415 def rebuild(self, parent, files):
416 self.clear()
416 self.clear()
417 for f in files:
417 for f in files:
418 if 'x' in files.flags(f):
418 if 'x' in files.flags(f):
419 self._map[f] = ('n', 0777, -1, 0)
419 self._map[f] = ('n', 0777, -1, 0)
420 else:
420 else:
421 self._map[f] = ('n', 0666, -1, 0)
421 self._map[f] = ('n', 0666, -1, 0)
422 self._pl = (parent, nullid)
422 self._pl = (parent, nullid)
423 self._dirty = True
423 self._dirty = True
424
424
425 def write(self):
425 def write(self):
426 if not self._dirty:
426 if not self._dirty:
427 return
427 return
428 st = self._opener("dirstate", "w", atomictemp=True)
428 st = self._opener("dirstate", "w", atomictemp=True)
429
429
430 # use the modification time of the newly created temporary file as the
430 # use the modification time of the newly created temporary file as the
431 # filesystem's notion of 'now'
431 # filesystem's notion of 'now'
432 now = int(util.fstat(st).st_mtime)
432 now = int(util.fstat(st).st_mtime)
433
433
434 cs = cStringIO.StringIO()
434 cs = cStringIO.StringIO()
435 copymap = self._copymap
435 copymap = self._copymap
436 pack = struct.pack
436 pack = struct.pack
437 write = cs.write
437 write = cs.write
438 write("".join(self._pl))
438 write("".join(self._pl))
439 for f, e in self._map.iteritems():
439 for f, e in self._map.iteritems():
440 if e[0] == 'n' and e[3] == now:
440 if e[0] == 'n' and e[3] == now:
441 # The file was last modified "simultaneously" with the current
441 # The file was last modified "simultaneously" with the current
442 # write to dirstate (i.e. within the same second for file-
442 # write to dirstate (i.e. within the same second for file-
443 # systems with a granularity of 1 sec). This commonly happens
443 # systems with a granularity of 1 sec). This commonly happens
444 # for at least a couple of files on 'update'.
444 # for at least a couple of files on 'update'.
445 # The user could change the file without changing its size
445 # The user could change the file without changing its size
446 # within the same second. Invalidate the file's stat data in
446 # within the same second. Invalidate the file's stat data in
447 # dirstate, forcing future 'status' calls to compare the
447 # dirstate, forcing future 'status' calls to compare the
448 # contents of the file. This prevents mistakenly treating such
448 # contents of the file. This prevents mistakenly treating such
449 # files as clean.
449 # files as clean.
450 e = (e[0], 0, -1, -1) # mark entry as 'unset'
450 e = (e[0], 0, -1, -1) # mark entry as 'unset'
451 self._map[f] = e
451 self._map[f] = e
452
452
453 if f in copymap:
453 if f in copymap:
454 f = "%s\0%s" % (f, copymap[f])
454 f = "%s\0%s" % (f, copymap[f])
455 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
455 e = pack(_format, e[0], e[1], e[2], e[3], len(f))
456 write(e)
456 write(e)
457 write(f)
457 write(f)
458 st.write(cs.getvalue())
458 st.write(cs.getvalue())
459 st.rename()
459 st.rename()
460 self._lastnormaltime = None
460 self._lastnormaltime = None
461 self._dirty = self._dirtypl = False
461 self._dirty = self._dirtypl = False
462
462
463 def _dirignore(self, f):
463 def _dirignore(self, f):
464 if f == '.':
464 if f == '.':
465 return False
465 return False
466 if self._ignore(f):
466 if self._ignore(f):
467 return True
467 return True
468 for p in _finddirs(f):
468 for p in _finddirs(f):
469 if self._ignore(p):
469 if self._ignore(p):
470 return True
470 return True
471 return False
471 return False
472
472
473 def walk(self, match, subrepos, unknown, ignored):
473 def walk(self, match, subrepos, unknown, ignored):
474 '''
474 '''
475 Walk recursively through the directory tree, finding all files
475 Walk recursively through the directory tree, finding all files
476 matched by match.
476 matched by match.
477
477
478 Return a dict mapping filename to stat-like object (either
478 Return a dict mapping filename to stat-like object (either
479 mercurial.osutil.stat instance or return value of os.stat()).
479 mercurial.osutil.stat instance or return value of os.stat()).
480 '''
480 '''
481
481
482 def fwarn(f, msg):
482 def fwarn(f, msg):
483 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
483 self._ui.warn('%s: %s\n' % (self.pathto(f), msg))
484 return False
484 return False
485
485
486 def badtype(mode):
486 def badtype(mode):
487 kind = _('unknown')
487 kind = _('unknown')
488 if stat.S_ISCHR(mode):
488 if stat.S_ISCHR(mode):
489 kind = _('character device')
489 kind = _('character device')
490 elif stat.S_ISBLK(mode):
490 elif stat.S_ISBLK(mode):
491 kind = _('block device')
491 kind = _('block device')
492 elif stat.S_ISFIFO(mode):
492 elif stat.S_ISFIFO(mode):
493 kind = _('fifo')
493 kind = _('fifo')
494 elif stat.S_ISSOCK(mode):
494 elif stat.S_ISSOCK(mode):
495 kind = _('socket')
495 kind = _('socket')
496 elif stat.S_ISDIR(mode):
496 elif stat.S_ISDIR(mode):
497 kind = _('directory')
497 kind = _('directory')
498 return _('unsupported file type (type is %s)') % kind
498 return _('unsupported file type (type is %s)') % kind
499
499
500 ignore = self._ignore
500 ignore = self._ignore
501 dirignore = self._dirignore
501 dirignore = self._dirignore
502 if ignored:
502 if ignored:
503 ignore = util.never
503 ignore = util.never
504 dirignore = util.never
504 dirignore = util.never
505 elif not unknown:
505 elif not unknown:
506 # if unknown and ignored are False, skip step 2
506 # if unknown and ignored are False, skip step 2
507 ignore = util.always
507 ignore = util.always
508 dirignore = util.always
508 dirignore = util.always
509
509
510 matchfn = match.matchfn
510 matchfn = match.matchfn
511 badfn = match.bad
511 badfn = match.bad
512 dmap = self._map
512 dmap = self._map
513 normpath = util.normpath
513 normpath = util.normpath
514 listdir = osutil.listdir
514 listdir = osutil.listdir
515 lstat = os.lstat
515 lstat = os.lstat
516 getkind = stat.S_IFMT
516 getkind = stat.S_IFMT
517 dirkind = stat.S_IFDIR
517 dirkind = stat.S_IFDIR
518 regkind = stat.S_IFREG
518 regkind = stat.S_IFREG
519 lnkkind = stat.S_IFLNK
519 lnkkind = stat.S_IFLNK
520 join = self._join
520 join = self._join
521 work = []
521 work = []
522 wadd = work.append
522 wadd = work.append
523
523
524 exact = skipstep3 = False
524 exact = skipstep3 = False
525 if matchfn == match.exact: # match.exact
525 if matchfn == match.exact: # match.exact
526 exact = True
526 exact = True
527 dirignore = util.always # skip step 2
527 dirignore = util.always # skip step 2
528 elif match.files() and not match.anypats(): # match.match, no patterns
528 elif match.files() and not match.anypats(): # match.match, no patterns
529 skipstep3 = True
529 skipstep3 = True
530
530
531 if self._checkcase:
531 if self._checkcase:
532 normalize = self._normalize
532 normalize = self._normalize
533 skipstep3 = False
533 skipstep3 = False
534 else:
534 else:
535 normalize = lambda x, y: x
535 normalize = lambda x, y: x
536
536
537 files = sorted(match.files())
537 files = sorted(match.files())
538 subrepos.sort()
538 subrepos.sort()
539 i, j = 0, 0
539 i, j = 0, 0
540 while i < len(files) and j < len(subrepos):
540 while i < len(files) and j < len(subrepos):
541 subpath = subrepos[j] + "/"
541 subpath = subrepos[j] + "/"
542 if files[i] < subpath:
542 if files[i] < subpath:
543 i += 1
543 i += 1
544 continue
544 continue
545 while i < len(files) and files[i].startswith(subpath):
545 while i < len(files) and files[i].startswith(subpath):
546 del files[i]
546 del files[i]
547 j += 1
547 j += 1
548
548
549 if not files or '.' in files:
549 if not files or '.' in files:
550 files = ['']
550 files = ['']
551 results = dict.fromkeys(subrepos)
551 results = dict.fromkeys(subrepos)
552 results['.hg'] = None
552 results['.hg'] = None
553
553
554 # step 1: find all explicit files
554 # step 1: find all explicit files
555 for ff in files:
555 for ff in files:
556 nf = normalize(normpath(ff), False)
556 nf = normalize(normpath(ff), False)
557 if nf in results:
557 if nf in results:
558 continue
558 continue
559
559
560 try:
560 try:
561 st = lstat(join(nf))
561 st = lstat(join(nf))
562 kind = getkind(st.st_mode)
562 kind = getkind(st.st_mode)
563 if kind == dirkind:
563 if kind == dirkind:
564 skipstep3 = False
564 skipstep3 = False
565 if nf in dmap:
565 if nf in dmap:
566 #file deleted on disk but still in dirstate
566 #file deleted on disk but still in dirstate
567 results[nf] = None
567 results[nf] = None
568 match.dir(nf)
568 match.dir(nf)
569 if not dirignore(nf):
569 if not dirignore(nf):
570 wadd(nf)
570 wadd(nf)
571 elif kind == regkind or kind == lnkkind:
571 elif kind == regkind or kind == lnkkind:
572 results[nf] = st
572 results[nf] = st
573 else:
573 else:
574 badfn(ff, badtype(kind))
574 badfn(ff, badtype(kind))
575 if nf in dmap:
575 if nf in dmap:
576 results[nf] = None
576 results[nf] = None
577 except OSError, inst:
577 except OSError, inst:
578 if nf in dmap: # does it exactly match a file?
578 if nf in dmap: # does it exactly match a file?
579 results[nf] = None
579 results[nf] = None
580 else: # does it match a directory?
580 else: # does it match a directory?
581 prefix = nf + "/"
581 prefix = nf + "/"
582 for fn in dmap:
582 for fn in dmap:
583 if fn.startswith(prefix):
583 if fn.startswith(prefix):
584 match.dir(nf)
584 match.dir(nf)
585 skipstep3 = False
585 skipstep3 = False
586 break
586 break
587 else:
587 else:
588 badfn(ff, inst.strerror)
588 badfn(ff, inst.strerror)
589
589
590 # step 2: visit subdirectories
590 # step 2: visit subdirectories
591 while work:
591 while work:
592 nd = work.pop()
592 nd = work.pop()
593 skip = None
593 skip = None
594 if nd == '.':
594 if nd == '.':
595 nd = ''
595 nd = ''
596 else:
596 else:
597 skip = '.hg'
597 skip = '.hg'
598 try:
598 try:
599 entries = listdir(join(nd), stat=True, skip=skip)
599 entries = listdir(join(nd), stat=True, skip=skip)
600 except OSError, inst:
600 except OSError, inst:
601 if inst.errno == errno.EACCES:
601 if inst.errno == errno.EACCES:
602 fwarn(nd, inst.strerror)
602 fwarn(nd, inst.strerror)
603 continue
603 continue
604 raise
604 raise
605 for f, kind, st in entries:
605 for f, kind, st in entries:
606 nf = normalize(nd and (nd + "/" + f) or f, True)
606 nf = normalize(nd and (nd + "/" + f) or f, True)
607 if nf not in results:
607 if nf not in results:
608 if kind == dirkind:
608 if kind == dirkind:
609 if not ignore(nf):
609 if not ignore(nf):
610 match.dir(nf)
610 match.dir(nf)
611 wadd(nf)
611 wadd(nf)
612 if nf in dmap and matchfn(nf):
612 if nf in dmap and matchfn(nf):
613 results[nf] = None
613 results[nf] = None
614 elif kind == regkind or kind == lnkkind:
614 elif kind == regkind or kind == lnkkind:
615 if nf in dmap:
615 if nf in dmap:
616 if matchfn(nf):
616 if matchfn(nf):
617 results[nf] = st
617 results[nf] = st
618 elif matchfn(nf) and not ignore(nf):
618 elif matchfn(nf) and not ignore(nf):
619 results[nf] = st
619 results[nf] = st
620 elif nf in dmap and matchfn(nf):
620 elif nf in dmap and matchfn(nf):
621 results[nf] = None
621 results[nf] = None
622
622
623 # step 3: report unseen items in the dmap hash
623 # step 3: report unseen items in the dmap hash
624 if not skipstep3 and not exact:
624 if not skipstep3 and not exact:
625 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
625 visit = sorted([f for f in dmap if f not in results and matchfn(f)])
626 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
626 for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
627 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
627 if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
628 st = None
628 st = None
629 results[nf] = st
629 results[nf] = st
630 for s in subrepos:
630 for s in subrepos:
631 del results[s]
631 del results[s]
632 del results['.hg']
632 del results['.hg']
633 return results
633 return results
634
634
635 def status(self, match, subrepos, ignored, clean, unknown):
635 def status(self, match, subrepos, ignored, clean, unknown):
636 '''Determine the status of the working copy relative to the
636 '''Determine the status of the working copy relative to the
637 dirstate and return a tuple of lists (unsure, modified, added,
637 dirstate and return a tuple of lists (unsure, modified, added,
638 removed, deleted, unknown, ignored, clean), where:
638 removed, deleted, unknown, ignored, clean), where:
639
639
640 unsure:
640 unsure:
641 files that might have been modified since the dirstate was
641 files that might have been modified since the dirstate was
642 written, but need to be read to be sure (size is the same
642 written, but need to be read to be sure (size is the same
643 but mtime differs)
643 but mtime differs)
644 modified:
644 modified:
645 files that have definitely been modified since the dirstate
645 files that have definitely been modified since the dirstate
646 was written (different size or mode)
646 was written (different size or mode)
647 added:
647 added:
648 files that have been explicitly added with hg add
648 files that have been explicitly added with hg add
649 removed:
649 removed:
650 files that have been explicitly removed with hg remove
650 files that have been explicitly removed with hg remove
651 deleted:
651 deleted:
652 files that have been deleted through other means ("missing")
652 files that have been deleted through other means ("missing")
653 unknown:
653 unknown:
654 files not in the dirstate that are not ignored
654 files not in the dirstate that are not ignored
655 ignored:
655 ignored:
656 files not in the dirstate that are ignored
656 files not in the dirstate that are ignored
657 (by _dirignore())
657 (by _dirignore())
658 clean:
658 clean:
659 files that have definitely not been modified since the
659 files that have definitely not been modified since the
660 dirstate was written
660 dirstate was written
661 '''
661 '''
662 listignored, listclean, listunknown = ignored, clean, unknown
662 listignored, listclean, listunknown = ignored, clean, unknown
663 lookup, modified, added, unknown, ignored = [], [], [], [], []
663 lookup, modified, added, unknown, ignored = [], [], [], [], []
664 removed, deleted, clean = [], [], []
664 removed, deleted, clean = [], [], []
665
665
666 dmap = self._map
666 dmap = self._map
667 ladd = lookup.append # aka "unsure"
667 ladd = lookup.append # aka "unsure"
668 madd = modified.append
668 madd = modified.append
669 aadd = added.append
669 aadd = added.append
670 uadd = unknown.append
670 uadd = unknown.append
671 iadd = ignored.append
671 iadd = ignored.append
672 radd = removed.append
672 radd = removed.append
673 dadd = deleted.append
673 dadd = deleted.append
674 cadd = clean.append
674 cadd = clean.append
675
675
676 lnkkind = stat.S_IFLNK
676 lnkkind = stat.S_IFLNK
677
677
678 for fn, st in self.walk(match, subrepos, listunknown,
678 for fn, st in self.walk(match, subrepos, listunknown,
679 listignored).iteritems():
679 listignored).iteritems():
680 if fn not in dmap:
680 if fn not in dmap:
681 if (listignored or match.exact(fn)) and self._dirignore(fn):
681 if (listignored or match.exact(fn)) and self._dirignore(fn):
682 if listignored:
682 if listignored:
683 iadd(fn)
683 iadd(fn)
684 elif listunknown:
684 elif listunknown:
685 uadd(fn)
685 uadd(fn)
686 continue
686 continue
687
687
688 state, mode, size, time = dmap[fn]
688 state, mode, size, time = dmap[fn]
689
689
690 if not st and state in "nma":
690 if not st and state in "nma":
691 dadd(fn)
691 dadd(fn)
692 elif state == 'n':
692 elif state == 'n':
693 # The "mode & lnkkind != lnkkind or self._checklink"
693 # The "mode & lnkkind != lnkkind or self._checklink"
694 # lines are an expansion of "islink => checklink"
694 # lines are an expansion of "islink => checklink"
695 # where islink means "is this a link?" and checklink
695 # where islink means "is this a link?" and checklink
696 # means "can we check links?".
696 # means "can we check links?".
697 mtime = int(st.st_mtime)
697 mtime = int(st.st_mtime)
698 if (size >= 0 and
698 if (size >= 0 and
699 (size != st.st_size
699 (size != st.st_size
700 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
700 or ((mode ^ st.st_mode) & 0100 and self._checkexec))
701 and (mode & lnkkind != lnkkind or self._checklink)
701 and (mode & lnkkind != lnkkind or self._checklink)
702 or size == -2 # other parent
702 or size == -2 # other parent
703 or fn in self._copymap):
703 or fn in self._copymap):
704 madd(fn)
704 madd(fn)
705 elif (mtime != time
705 elif (mtime != time
706 and (mode & lnkkind != lnkkind or self._checklink)):
706 and (mode & lnkkind != lnkkind or self._checklink)):
707 ladd(fn)
707 ladd(fn)
708 elif mtime == self._lastnormaltime:
708 elif mtime == self._lastnormaltime:
709 # fn may have been changed in the same timeslot without
709 # fn may have been changed in the same timeslot without
710 # changing its size. This can happen if we quickly do
710 # changing its size. This can happen if we quickly do
711 # multiple commits in a single transaction.
711 # multiple commits in a single transaction.
712 # Force lookup, so we don't miss such a racy file change.
712 # Force lookup, so we don't miss such a racy file change.
713 ladd(fn)
713 ladd(fn)
714 elif listclean:
714 elif listclean:
715 cadd(fn)
715 cadd(fn)
716 elif state == 'm':
716 elif state == 'm':
717 madd(fn)
717 madd(fn)
718 elif state == 'a':
718 elif state == 'a':
719 aadd(fn)
719 aadd(fn)
720 elif state == 'r':
720 elif state == 'r':
721 radd(fn)
721 radd(fn)
722
722
723 return (lookup, modified, added, removed, deleted, unknown, ignored,
723 return (lookup, modified, added, removed, deleted, unknown, ignored,
724 clean)
724 clean)
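Aside from switching _addpath() from util.checkfilename() to scmutil.checkfilename(), the dirstate.py hunk above is unchanged. For reference, a small standalone sketch of what its _finddirs() helper yields for an example path (the path is made up):

    def _finddirs(path):
        # yield every ancestor directory of a '/'-separated path, deepest first
        pos = path.rfind('/')
        while pos != -1:
            yield path[:pos]
            pos = path.rfind('/', 0, pos)

    print list(_finddirs('a/b/c.txt'))   # ['a/b', 'a']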
@@ -1,245 +1,250 @@
1 # scmutil.py - Mercurial core utility functions
1 # scmutil.py - Mercurial core utility functions
2 #
2 #
3 # Copyright Matt Mackall <mpm@selenic.com>
3 # Copyright Matt Mackall <mpm@selenic.com>
4 #
4 #
5 # This software may be used and distributed according to the terms of the
5 # This software may be used and distributed according to the terms of the
6 # GNU General Public License version 2 or any later version.
6 # GNU General Public License version 2 or any later version.
7
7
8 from i18n import _
8 from i18n import _
9 import util, error
9 import util, error
10 import os, errno, stat
10 import os, errno, stat
11
11
12 def checkfilename(f):
13 '''Check that the filename f is an acceptable filename for a tracked file'''
14 if '\r' in f or '\n' in f:
15 raise util.Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
16
12 def checkportable(ui, f):
17 def checkportable(ui, f):
13 '''Check if filename f is portable and warn or abort depending on config'''
18 '''Check if filename f is portable and warn or abort depending on config'''
14 util.checkfilename(f)
19 checkfilename(f)
15 val = ui.config('ui', 'portablefilenames', 'warn')
20 val = ui.config('ui', 'portablefilenames', 'warn')
16 lval = val.lower()
21 lval = val.lower()
17 abort = os.name == 'nt' or lval == 'abort'
22 abort = os.name == 'nt' or lval == 'abort'
18 bval = util.parsebool(val)
23 bval = util.parsebool(val)
19 if abort or lval == 'warn' or bval:
24 if abort or lval == 'warn' or bval:
20 msg = util.checkwinfilename(f)
25 msg = util.checkwinfilename(f)
21 if msg:
26 if msg:
22 if abort:
27 if abort:
23 raise util.Abort("%s: %r" % (msg, f))
28 raise util.Abort("%s: %r" % (msg, f))
24 ui.warn(_("warning: %s: %r\n") % (msg, f))
29 ui.warn(_("warning: %s: %r\n") % (msg, f))
25 elif bval is None and lval != 'ignore':
30 elif bval is None and lval != 'ignore':
26 raise error.ConfigError(
31 raise error.ConfigError(
27 _("ui.portablefilenames value is invalid ('%s')") % val)
32 _("ui.portablefilenames value is invalid ('%s')") % val)
28
33
29 class path_auditor(object):
34 class path_auditor(object):
30 '''ensure that a filesystem path contains no banned components.
35 '''ensure that a filesystem path contains no banned components.
31 the following properties of a path are checked:
36 the following properties of a path are checked:
32
37
33 - ends with a directory separator
38 - ends with a directory separator
34 - under top-level .hg
39 - under top-level .hg
35 - starts at the root of a windows drive
40 - starts at the root of a windows drive
36 - contains ".."
41 - contains ".."
37 - traverses a symlink (e.g. a/symlink_here/b)
42 - traverses a symlink (e.g. a/symlink_here/b)
38 - inside a nested repository (a callback can be used to approve
43 - inside a nested repository (a callback can be used to approve
39 some nested repositories, e.g., subrepositories)
44 some nested repositories, e.g., subrepositories)
40 '''
45 '''
41
46
42 def __init__(self, root, callback=None):
47 def __init__(self, root, callback=None):
43 self.audited = set()
48 self.audited = set()
44 self.auditeddir = set()
49 self.auditeddir = set()
45 self.root = root
50 self.root = root
46 self.callback = callback
51 self.callback = callback
47
52
48 def __call__(self, path):
53 def __call__(self, path):
49 '''Check the relative path.
54 '''Check the relative path.
50 path may contain a pattern (e.g. foodir/**.txt)'''
55 path may contain a pattern (e.g. foodir/**.txt)'''
51
56
52 if path in self.audited:
57 if path in self.audited:
53 return
58 return
54 # AIX ignores "/" at end of path, others raise EISDIR.
59 # AIX ignores "/" at end of path, others raise EISDIR.
55 if util.endswithsep(path):
60 if util.endswithsep(path):
56 raise util.Abort(_("path ends in directory separator: %s") % path)
61 raise util.Abort(_("path ends in directory separator: %s") % path)
57 normpath = os.path.normcase(path)
62 normpath = os.path.normcase(path)
58 parts = util.splitpath(normpath)
63 parts = util.splitpath(normpath)
59 if (os.path.splitdrive(path)[0]
64 if (os.path.splitdrive(path)[0]
60 or parts[0].lower() in ('.hg', '.hg.', '')
65 or parts[0].lower() in ('.hg', '.hg.', '')
61 or os.pardir in parts):
66 or os.pardir in parts):
62 raise util.Abort(_("path contains illegal component: %s") % path)
67 raise util.Abort(_("path contains illegal component: %s") % path)
63 if '.hg' in path.lower():
68 if '.hg' in path.lower():
64 lparts = [p.lower() for p in parts]
69 lparts = [p.lower() for p in parts]
65 for p in '.hg', '.hg.':
70 for p in '.hg', '.hg.':
66 if p in lparts[1:]:
71 if p in lparts[1:]:
67 pos = lparts.index(p)
72 pos = lparts.index(p)
68 base = os.path.join(*parts[:pos])
73 base = os.path.join(*parts[:pos])
69 raise util.Abort(_('path %r is inside nested repo %r')
74 raise util.Abort(_('path %r is inside nested repo %r')
70 % (path, base))
75 % (path, base))
71
76
72 parts.pop()
77 parts.pop()
73 prefixes = []
78 prefixes = []
74 while parts:
79 while parts:
75 prefix = os.sep.join(parts)
80 prefix = os.sep.join(parts)
76 if prefix in self.auditeddir:
81 if prefix in self.auditeddir:
77 break
82 break
78 curpath = os.path.join(self.root, prefix)
83 curpath = os.path.join(self.root, prefix)
79 try:
84 try:
80 st = os.lstat(curpath)
85 st = os.lstat(curpath)
81 except OSError, err:
86 except OSError, err:
82 # EINVAL can be raised as invalid path syntax under win32.
87 # EINVAL can be raised as invalid path syntax under win32.
83 # They must be ignored for patterns can be checked too.
88 # They must be ignored for patterns can be checked too.
84 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
89 if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL):
85 raise
90 raise
86 else:
91 else:
87 if stat.S_ISLNK(st.st_mode):
92 if stat.S_ISLNK(st.st_mode):
88 raise util.Abort(
93 raise util.Abort(
89 _('path %r traverses symbolic link %r')
94 _('path %r traverses symbolic link %r')
90 % (path, prefix))
95 % (path, prefix))
91 elif (stat.S_ISDIR(st.st_mode) and
96 elif (stat.S_ISDIR(st.st_mode) and
92 os.path.isdir(os.path.join(curpath, '.hg'))):
97 os.path.isdir(os.path.join(curpath, '.hg'))):
93 if not self.callback or not self.callback(curpath):
98 if not self.callback or not self.callback(curpath):
94 raise util.Abort(_('path %r is inside nested repo %r') %
99 raise util.Abort(_('path %r is inside nested repo %r') %
95 (path, prefix))
100 (path, prefix))
96 prefixes.append(prefix)
101 prefixes.append(prefix)
97 parts.pop()
102 parts.pop()
98
103
99 self.audited.add(path)
104 self.audited.add(path)
100 # only add prefixes to the cache after checking everything: we don't
105 # only add prefixes to the cache after checking everything: we don't
101 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
106 # want to add "foo/bar/baz" before checking if there's a "foo/.hg"
102 self.auditeddir.update(prefixes)
107 self.auditeddir.update(prefixes)
103
108
104 class opener(object):
109 class opener(object):
105 '''Open files relative to a base directory
110 '''Open files relative to a base directory
106
111
107 This class is used to hide the details of COW semantics and
112 This class is used to hide the details of COW semantics and
108 remote file access from higher level code.
113 remote file access from higher level code.
109 '''
114 '''
110 def __init__(self, base, audit=True):
115 def __init__(self, base, audit=True):
111 self.base = base
116 self.base = base
112 if audit:
117 if audit:
113 self.auditor = path_auditor(base)
118 self.auditor = path_auditor(base)
114 else:
119 else:
115 self.auditor = util.always
120 self.auditor = util.always
116 self.createmode = None
121 self.createmode = None
117 self._trustnlink = None
122 self._trustnlink = None
118
123
119 @util.propertycache
124 @util.propertycache
120 def _can_symlink(self):
125 def _can_symlink(self):
121 return util.checklink(self.base)
126 return util.checklink(self.base)
122
127
123 def _fixfilemode(self, name):
128 def _fixfilemode(self, name):
124 if self.createmode is None:
129 if self.createmode is None:
125 return
130 return
126 os.chmod(name, self.createmode & 0666)
131 os.chmod(name, self.createmode & 0666)
127
132
128 def __call__(self, path, mode="r", text=False, atomictemp=False):
133 def __call__(self, path, mode="r", text=False, atomictemp=False):
129 r = util.checkosfilename(path)
134 r = util.checkosfilename(path)
130 if r:
135 if r:
131 raise util.Abort("%s: %r" % (r, path))
136 raise util.Abort("%s: %r" % (r, path))
132 self.auditor(path)
137 self.auditor(path)
133 f = os.path.join(self.base, path)
138 f = os.path.join(self.base, path)
134
139
135 if not text and "b" not in mode:
140 if not text and "b" not in mode:
136 mode += "b" # for that other OS
141 mode += "b" # for that other OS
137
142
138 nlink = -1
143 nlink = -1
139 dirname, basename = os.path.split(f)
144 dirname, basename = os.path.split(f)
140 # If basename is empty, then the path is malformed because it points
145 # If basename is empty, then the path is malformed because it points
141 # to a directory. Let the posixfile() call below raise IOError.
146 # to a directory. Let the posixfile() call below raise IOError.
142 if basename and mode not in ('r', 'rb'):
147 if basename and mode not in ('r', 'rb'):
143 if atomictemp:
148 if atomictemp:
144 if not os.path.isdir(dirname):
149 if not os.path.isdir(dirname):
145 util.makedirs(dirname, self.createmode)
150 util.makedirs(dirname, self.createmode)
146 return util.atomictempfile(f, mode, self.createmode)
151 return util.atomictempfile(f, mode, self.createmode)
147 try:
152 try:
148 if 'w' in mode:
153 if 'w' in mode:
149 util.unlink(f)
154 util.unlink(f)
150 nlink = 0
155 nlink = 0
151 else:
156 else:
152 # nlinks() may behave differently for files on Windows
157 # nlinks() may behave differently for files on Windows
153 # shares if the file is open.
158 # shares if the file is open.
154 fd = util.posixfile(f)
159 fd = util.posixfile(f)
155 nlink = util.nlinks(f)
160 nlink = util.nlinks(f)
156 if nlink < 1:
161 if nlink < 1:
157 nlink = 2 # force mktempcopy (issue1922)
162 nlink = 2 # force mktempcopy (issue1922)
158 fd.close()
163 fd.close()
159 except (OSError, IOError), e:
164 except (OSError, IOError), e:
160 if e.errno != errno.ENOENT:
165 if e.errno != errno.ENOENT:
161 raise
166 raise
162 nlink = 0
167 nlink = 0
163 if not os.path.isdir(dirname):
168 if not os.path.isdir(dirname):
164 util.makedirs(dirname, self.createmode)
169 util.makedirs(dirname, self.createmode)
165 if nlink > 0:
170 if nlink > 0:
166 if self._trustnlink is None:
171 if self._trustnlink is None:
167 self._trustnlink = nlink > 1 or util.checknlink(f)
172 self._trustnlink = nlink > 1 or util.checknlink(f)
168 if nlink > 1 or not self._trustnlink:
173 if nlink > 1 or not self._trustnlink:
169 util.rename(util.mktempcopy(f), f)
174 util.rename(util.mktempcopy(f), f)
170 fp = util.posixfile(f, mode)
175 fp = util.posixfile(f, mode)
171 if nlink == 0:
176 if nlink == 0:
172 self._fixfilemode(f)
177 self._fixfilemode(f)
173 return fp
178 return fp
174
179
175 def symlink(self, src, dst):
180 def symlink(self, src, dst):
176 self.auditor(dst)
181 self.auditor(dst)
177 linkname = os.path.join(self.base, dst)
182 linkname = os.path.join(self.base, dst)
178 try:
183 try:
179 os.unlink(linkname)
184 os.unlink(linkname)
180 except OSError:
185 except OSError:
181 pass
186 pass
182
187
183 dirname = os.path.dirname(linkname)
188 dirname = os.path.dirname(linkname)
184 if not os.path.exists(dirname):
189 if not os.path.exists(dirname):
185 util.makedirs(dirname, self.createmode)
190 util.makedirs(dirname, self.createmode)
186
191
187 if self._can_symlink:
192 if self._can_symlink:
188 try:
193 try:
189 os.symlink(src, linkname)
194 os.symlink(src, linkname)
190 except OSError, err:
195 except OSError, err:
191 raise OSError(err.errno, _('could not symlink to %r: %s') %
196 raise OSError(err.errno, _('could not symlink to %r: %s') %
192 (src, err.strerror), linkname)
197 (src, err.strerror), linkname)
193 else:
198 else:
194 f = self(dst, "w")
199 f = self(dst, "w")
195 f.write(src)
200 f.write(src)
196 f.close()
201 f.close()
197 self._fixfilemode(dst)
202 self._fixfilemode(dst)
198
203
199 def canonpath(root, cwd, myname, auditor=None):
204 def canonpath(root, cwd, myname, auditor=None):
200 '''return the canonical path of myname, given cwd and root'''
205 '''return the canonical path of myname, given cwd and root'''
201 if util.endswithsep(root):
206 if util.endswithsep(root):
202 rootsep = root
207 rootsep = root
203 else:
208 else:
204 rootsep = root + os.sep
209 rootsep = root + os.sep
205 name = myname
210 name = myname
206 if not os.path.isabs(name):
211 if not os.path.isabs(name):
207 name = os.path.join(root, cwd, name)
212 name = os.path.join(root, cwd, name)
208 name = os.path.normpath(name)
213 name = os.path.normpath(name)
209 if auditor is None:
214 if auditor is None:
210 auditor = path_auditor(root)
215 auditor = path_auditor(root)
211 if name != rootsep and name.startswith(rootsep):
216 if name != rootsep and name.startswith(rootsep):
212 name = name[len(rootsep):]
217 name = name[len(rootsep):]
213 auditor(name)
218 auditor(name)
214 return util.pconvert(name)
219 return util.pconvert(name)
215 elif name == root:
220 elif name == root:
216 return ''
221 return ''
217 else:
222 else:
218 # Determine whether `name' is in the hierarchy at or beneath `root',
223 # Determine whether `name' is in the hierarchy at or beneath `root',
219 # by iterating name=dirname(name) until that causes no change (can't
224 # by iterating name=dirname(name) until that causes no change (can't
220 # check name == '/', because that doesn't work on windows). For each
225 # check name == '/', because that doesn't work on windows). For each
221 # `name', compare dev/inode numbers. If they match, the list `rel'
226 # `name', compare dev/inode numbers. If they match, the list `rel'
222 # holds the reversed list of components making up the relative file
227 # holds the reversed list of components making up the relative file
223 # name we want.
228 # name we want.
224 root_st = os.stat(root)
229 root_st = os.stat(root)
225 rel = []
230 rel = []
226 while True:
231 while True:
227 try:
232 try:
228 name_st = os.stat(name)
233 name_st = os.stat(name)
229 except OSError:
234 except OSError:
230 break
235 break
231 if util.samestat(name_st, root_st):
236 if util.samestat(name_st, root_st):
232 if not rel:
237 if not rel:
233 # name was actually the same as root (maybe a symlink)
238 # name was actually the same as root (maybe a symlink)
234 return ''
239 return ''
235 rel.reverse()
240 rel.reverse()
236 name = os.path.join(*rel)
241 name = os.path.join(*rel)
237 auditor(name)
242 auditor(name)
238 return util.pconvert(name)
243 return util.pconvert(name)
239 dirname, basename = os.path.split(name)
244 dirname, basename = os.path.split(name)
240 rel.append(basename)
245 rel.append(basename)
241 if dirname == name:
246 if dirname == name:
242 break
247 break
243 name = dirname
248 name = dirname
244
249
245 raise util.Abort('%s not under root' % myname)
250 raise util.Abort('%s not under root' % myname)
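The new scmutil.checkportable() above decides, from the ui.portablefilenames setting, whether a non-portable Windows filename aborts, warns, or is ignored. A simplified standalone sketch of that decision table, assuming util.parsebool()'s usual true/false spellings; it is not the real API:

    import os

    def portability_action(val, on_windows=(os.name == 'nt')):
        # 'val' stands in for ui.config('ui', 'portablefilenames', 'warn')
        lval = val.lower()
        if on_windows or lval == 'abort':
            return 'abort'    # util.Abort on a name checkwinfilename rejects
        if lval == 'warn' or lval in ('1', 'yes', 'true', 'on'):
            return 'warn'     # ui.warn(), then continue
        if lval == 'ignore' or lval in ('0', 'no', 'false', 'off'):
            return 'ignore'   # skip the check entirely
        return 'configerror'  # error.ConfigError for anything else

    print portability_action('warn')    # 'abort' on Windows, 'warn' elsewhere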
@@ -1,1368 +1,1363 @@
1 # util.py - Mercurial utility functions and platform specific implementations
1 # util.py - Mercurial utility functions and platform specific implementations
2 #
2 #
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
3 # Copyright 2005 K. Thananchayan <thananck@yahoo.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
4 # Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
5 # Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
6 #
6 #
7 # This software may be used and distributed according to the terms of the
7 # This software may be used and distributed according to the terms of the
8 # GNU General Public License version 2 or any later version.
8 # GNU General Public License version 2 or any later version.
9
9
10 """Mercurial utility functions and platform specfic implementations.
10 """Mercurial utility functions and platform specfic implementations.
11
11
12 This contains helper routines that are independent of the SCM core and
12 This contains helper routines that are independent of the SCM core and
13 hide platform-specific details from the core.
13 hide platform-specific details from the core.
14 """
14 """
15
15
16 from i18n import _
16 from i18n import _
17 import error, osutil, encoding
17 import error, osutil, encoding
18 import errno, re, shutil, sys, tempfile, traceback
18 import errno, re, shutil, sys, tempfile, traceback
19 import os, time, calendar, textwrap, unicodedata, signal
19 import os, time, calendar, textwrap, unicodedata, signal
20 import imp, socket
20 import imp, socket
21
21
22 # Python compatibility
22 # Python compatibility
23
23
24 def sha1(s):
24 def sha1(s):
25 return _fastsha1(s)
25 return _fastsha1(s)
26
26
27 def _fastsha1(s):
27 def _fastsha1(s):
28 # This function will import sha1 from hashlib or sha (whichever is
28 # This function will import sha1 from hashlib or sha (whichever is
29 # available) and overwrite itself with it on the first call.
29 # available) and overwrite itself with it on the first call.
30 # Subsequent calls will go directly to the imported function.
30 # Subsequent calls will go directly to the imported function.
31 if sys.version_info >= (2, 5):
31 if sys.version_info >= (2, 5):
32 from hashlib import sha1 as _sha1
32 from hashlib import sha1 as _sha1
33 else:
33 else:
34 from sha import sha as _sha1
34 from sha import sha as _sha1
35 global _fastsha1, sha1
35 global _fastsha1, sha1
36 _fastsha1 = sha1 = _sha1
36 _fastsha1 = sha1 = _sha1
37 return _sha1(s)
37 return _sha1(s)
38
38
39 import __builtin__
39 import __builtin__
40
40
41 if sys.version_info[0] < 3:
41 if sys.version_info[0] < 3:
42 def fakebuffer(sliceable, offset=0):
42 def fakebuffer(sliceable, offset=0):
43 return sliceable[offset:]
43 return sliceable[offset:]
44 else:
44 else:
45 def fakebuffer(sliceable, offset=0):
45 def fakebuffer(sliceable, offset=0):
46 return memoryview(sliceable)[offset:]
46 return memoryview(sliceable)[offset:]
47 try:
47 try:
48 buffer
48 buffer
49 except NameError:
49 except NameError:
50 __builtin__.buffer = fakebuffer
50 __builtin__.buffer = fakebuffer
51
51
52 import subprocess
52 import subprocess
53 closefds = os.name == 'posix'
53 closefds = os.name == 'posix'
54
54
55 def popen2(cmd, env=None, newlines=False):
55 def popen2(cmd, env=None, newlines=False):
56 # Setting bufsize to -1 lets the system decide the buffer size.
56 # Setting bufsize to -1 lets the system decide the buffer size.
57 # The default for bufsize is 0, meaning unbuffered. This leads to
57 # The default for bufsize is 0, meaning unbuffered. This leads to
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
58 # poor performance on Mac OS X: http://bugs.python.org/issue4194
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
59 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
60 close_fds=closefds,
60 close_fds=closefds,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
61 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
62 universal_newlines=newlines,
62 universal_newlines=newlines,
63 env=env)
63 env=env)
64 return p.stdin, p.stdout
64 return p.stdin, p.stdout
65
65
66 def popen3(cmd, env=None, newlines=False):
66 def popen3(cmd, env=None, newlines=False):
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
67 p = subprocess.Popen(cmd, shell=True, bufsize=-1,
68 close_fds=closefds,
68 close_fds=closefds,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
69 stdin=subprocess.PIPE, stdout=subprocess.PIPE,
70 stderr=subprocess.PIPE,
70 stderr=subprocess.PIPE,
71 universal_newlines=newlines,
71 universal_newlines=newlines,
72 env=env)
72 env=env)
73 return p.stdin, p.stdout, p.stderr
73 return p.stdin, p.stdout, p.stderr
74
74
75 def version():
75 def version():
76 """Return version information if available."""
76 """Return version information if available."""
77 try:
77 try:
78 import __version__
78 import __version__
79 return __version__.version
79 return __version__.version
80 except ImportError:
80 except ImportError:
81 return 'unknown'
81 return 'unknown'
82
82
83 # used by parsedate
83 # used by parsedate
84 defaultdateformats = (
84 defaultdateformats = (
85 '%Y-%m-%d %H:%M:%S',
85 '%Y-%m-%d %H:%M:%S',
86 '%Y-%m-%d %I:%M:%S%p',
86 '%Y-%m-%d %I:%M:%S%p',
87 '%Y-%m-%d %H:%M',
87 '%Y-%m-%d %H:%M',
88 '%Y-%m-%d %I:%M%p',
88 '%Y-%m-%d %I:%M%p',
89 '%Y-%m-%d',
89 '%Y-%m-%d',
90 '%m-%d',
90 '%m-%d',
91 '%m/%d',
91 '%m/%d',
92 '%m/%d/%y',
92 '%m/%d/%y',
93 '%m/%d/%Y',
93 '%m/%d/%Y',
94 '%a %b %d %H:%M:%S %Y',
94 '%a %b %d %H:%M:%S %Y',
95 '%a %b %d %I:%M:%S%p %Y',
95 '%a %b %d %I:%M:%S%p %Y',
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
96 '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822"
97 '%b %d %H:%M:%S %Y',
97 '%b %d %H:%M:%S %Y',
98 '%b %d %I:%M:%S%p %Y',
98 '%b %d %I:%M:%S%p %Y',
99 '%b %d %H:%M:%S',
99 '%b %d %H:%M:%S',
100 '%b %d %I:%M:%S%p',
100 '%b %d %I:%M:%S%p',
101 '%b %d %H:%M',
101 '%b %d %H:%M',
102 '%b %d %I:%M%p',
102 '%b %d %I:%M%p',
103 '%b %d %Y',
103 '%b %d %Y',
104 '%b %d',
104 '%b %d',
105 '%H:%M:%S',
105 '%H:%M:%S',
106 '%I:%M:%S%p',
106 '%I:%M:%S%p',
107 '%H:%M',
107 '%H:%M',
108 '%I:%M%p',
108 '%I:%M%p',
109 )
109 )
110
110
111 extendeddateformats = defaultdateformats + (
111 extendeddateformats = defaultdateformats + (
112 "%Y",
112 "%Y",
113 "%Y-%m",
113 "%Y-%m",
114 "%b",
114 "%b",
115 "%b %Y",
115 "%b %Y",
116 )
116 )
117
117
118 def cachefunc(func):
118 def cachefunc(func):
119 '''cache the result of function calls'''
119 '''cache the result of function calls'''
120 # XXX doesn't handle keyword args
120 # XXX doesn't handle keyword args
121 cache = {}
121 cache = {}
122 if func.func_code.co_argcount == 1:
122 if func.func_code.co_argcount == 1:
123 # we gain a small amount of time because
123 # we gain a small amount of time because
124 # we don't need to pack/unpack the list
124 # we don't need to pack/unpack the list
125 def f(arg):
125 def f(arg):
126 if arg not in cache:
126 if arg not in cache:
127 cache[arg] = func(arg)
127 cache[arg] = func(arg)
128 return cache[arg]
128 return cache[arg]
129 else:
129 else:
130 def f(*args):
130 def f(*args):
131 if args not in cache:
131 if args not in cache:
132 cache[args] = func(*args)
132 cache[args] = func(*args)
133 return cache[args]
133 return cache[args]
134
134
135 return f
135 return f
136
136
137 def lrucachefunc(func):
137 def lrucachefunc(func):
138 '''cache most recent results of function calls'''
138 '''cache most recent results of function calls'''
139 cache = {}
139 cache = {}
140 order = []
140 order = []
141 if func.func_code.co_argcount == 1:
141 if func.func_code.co_argcount == 1:
142 def f(arg):
142 def f(arg):
143 if arg not in cache:
143 if arg not in cache:
144 if len(cache) > 20:
144 if len(cache) > 20:
145 del cache[order.pop(0)]
145 del cache[order.pop(0)]
146 cache[arg] = func(arg)
146 cache[arg] = func(arg)
147 else:
147 else:
148 order.remove(arg)
148 order.remove(arg)
149 order.append(arg)
149 order.append(arg)
150 return cache[arg]
150 return cache[arg]
151 else:
151 else:
152 def f(*args):
152 def f(*args):
153 if args not in cache:
153 if args not in cache:
154 if len(cache) > 20:
154 if len(cache) > 20:
155 del cache[order.pop(0)]
155 del cache[order.pop(0)]
156 cache[args] = func(*args)
156 cache[args] = func(*args)
157 else:
157 else:
158 order.remove(args)
158 order.remove(args)
159 order.append(args)
159 order.append(args)
160 return cache[args]
160 return cache[args]
161
161
162 return f
162 return f
163
163
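# Usage sketch for the caching helpers above; the wrapped function below is
# only an example (any single-argument Python function works). lrucachefunc
# keeps roughly the 20 most recent results, evicting the oldest first.
expanduser = lrucachefunc(os.path.expanduser)
expanduser('~/repo')   # computed once, then answered from the cache
expanduser('~/repo')   # cache hit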
164 class propertycache(object):
164 class propertycache(object):
165 def __init__(self, func):
165 def __init__(self, func):
166 self.func = func
166 self.func = func
167 self.name = func.__name__
167 self.name = func.__name__
168 def __get__(self, obj, type=None):
168 def __get__(self, obj, type=None):
169 result = self.func(obj)
169 result = self.func(obj)
170 setattr(obj, self.name, result)
170 setattr(obj, self.name, result)
171 return result
171 return result
172
172
173 def pipefilter(s, cmd):
173 def pipefilter(s, cmd):
174 '''filter string S through command CMD, returning its output'''
174 '''filter string S through command CMD, returning its output'''
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
175 p = subprocess.Popen(cmd, shell=True, close_fds=closefds,
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
176 stdin=subprocess.PIPE, stdout=subprocess.PIPE)
177 pout, perr = p.communicate(s)
177 pout, perr = p.communicate(s)
178 return pout
178 return pout
179
179
180 def tempfilter(s, cmd):
180 def tempfilter(s, cmd):
181 '''filter string S through a pair of temporary files with CMD.
181 '''filter string S through a pair of temporary files with CMD.
182 CMD is used as a template to create the real command to be run,
182 CMD is used as a template to create the real command to be run,
183 with the strings INFILE and OUTFILE replaced by the real names of
183 with the strings INFILE and OUTFILE replaced by the real names of
184 the temporary files generated.'''
184 the temporary files generated.'''
185 inname, outname = None, None
185 inname, outname = None, None
186 try:
186 try:
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
187 infd, inname = tempfile.mkstemp(prefix='hg-filter-in-')
188 fp = os.fdopen(infd, 'wb')
188 fp = os.fdopen(infd, 'wb')
189 fp.write(s)
189 fp.write(s)
190 fp.close()
190 fp.close()
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
191 outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-')
192 os.close(outfd)
192 os.close(outfd)
193 cmd = cmd.replace('INFILE', inname)
193 cmd = cmd.replace('INFILE', inname)
194 cmd = cmd.replace('OUTFILE', outname)
194 cmd = cmd.replace('OUTFILE', outname)
195 code = os.system(cmd)
195 code = os.system(cmd)
196 if sys.platform == 'OpenVMS' and code & 1:
196 if sys.platform == 'OpenVMS' and code & 1:
197 code = 0
197 code = 0
198 if code:
198 if code:
199 raise Abort(_("command '%s' failed: %s") %
199 raise Abort(_("command '%s' failed: %s") %
200 (cmd, explain_exit(code)))
200 (cmd, explain_exit(code)))
201 fp = open(outname, 'rb')
201 fp = open(outname, 'rb')
202 r = fp.read()
202 r = fp.read()
203 fp.close()
203 fp.close()
204 return r
204 return r
205 finally:
205 finally:
206 try:
206 try:
207 if inname:
207 if inname:
208 os.unlink(inname)
208 os.unlink(inname)
209 except:
209 except:
210 pass
210 pass
211 try:
211 try:
212 if outname:
212 if outname:
213 os.unlink(outname)
213 os.unlink(outname)
214 except:
214 except:
215 pass
215 pass
216
216
217 filtertable = {
217 filtertable = {
218 'tempfile:': tempfilter,
218 'tempfile:': tempfilter,
219 'pipe:': pipefilter,
219 'pipe:': pipefilter,
220 }
220 }
221
221
222 def filter(s, cmd):
222 def filter(s, cmd):
223 "filter a string through a command that transforms its input to its output"
223 "filter a string through a command that transforms its input to its output"
224 for name, fn in filtertable.iteritems():
224 for name, fn in filtertable.iteritems():
225 if cmd.startswith(name):
225 if cmd.startswith(name):
226 return fn(s, cmd[len(name):].lstrip())
226 return fn(s, cmd[len(name):].lstrip())
227 return pipefilter(s, cmd)
227 return pipefilter(s, cmd)
228
228
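# Sketch of the filter entry point: the command prefix selects the strategy,
# anything else falls back to pipefilter. The commands below are illustrative
# only and assume standard Unix tools are available.
filter('some text\n', 'pipe: tr a-z A-Z')                 # run through a pipe
filter('some text\n', 'tempfile: sort INFILE > OUTFILE')  # via temporary files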
229 def binary(s):
229 def binary(s):
230 """return true if a string is binary data"""
230 """return true if a string is binary data"""
231 return bool(s and '\0' in s)
231 return bool(s and '\0' in s)
232
232
233 def increasingchunks(source, min=1024, max=65536):
233 def increasingchunks(source, min=1024, max=65536):
234 '''return no less than min bytes per chunk while data remains,
234 '''return no less than min bytes per chunk while data remains,
235 doubling min after each chunk until it reaches max'''
235 doubling min after each chunk until it reaches max'''
236 def log2(x):
236 def log2(x):
237 if not x:
237 if not x:
238 return 0
238 return 0
239 i = 0
239 i = 0
240 while x:
240 while x:
241 x >>= 1
241 x >>= 1
242 i += 1
242 i += 1
243 return i - 1
243 return i - 1
244
244
245 buf = []
245 buf = []
246 blen = 0
246 blen = 0
247 for chunk in source:
247 for chunk in source:
248 buf.append(chunk)
248 buf.append(chunk)
249 blen += len(chunk)
249 blen += len(chunk)
250 if blen >= min:
250 if blen >= min:
251 if min < max:
251 if min < max:
252 min = min << 1
252 min = min << 1
253 nmin = 1 << log2(blen)
253 nmin = 1 << log2(blen)
254 if nmin > min:
254 if nmin > min:
255 min = nmin
255 min = nmin
256 if min > max:
256 if min > max:
257 min = max
257 min = max
258 yield ''.join(buf)
258 yield ''.join(buf)
259 blen = 0
259 blen = 0
260 buf = []
260 buf = []
261 if buf:
261 if buf:
262 yield ''.join(buf)
262 yield ''.join(buf)
263
263
264 Abort = error.Abort
264 Abort = error.Abort
265
265
266 def always(fn):
266 def always(fn):
267 return True
267 return True
268
268
269 def never(fn):
269 def never(fn):
270 return False
270 return False
271
271
272 def pathto(root, n1, n2):
272 def pathto(root, n1, n2):
273 '''return the relative path from one place to another.
273 '''return the relative path from one place to another.
274 root should use os.sep to separate directories
274 root should use os.sep to separate directories
275 n1 should use os.sep to separate directories
275 n1 should use os.sep to separate directories
276 n2 should use "/" to separate directories
276 n2 should use "/" to separate directories
277 returns an os.sep-separated path.
277 returns an os.sep-separated path.
278
278
279 If n1 is a relative path, it's assumed it's
279 If n1 is a relative path, it's assumed it's
280 relative to root.
280 relative to root.
281 n2 should always be relative to root.
281 n2 should always be relative to root.
282 '''
282 '''
283 if not n1:
283 if not n1:
284 return localpath(n2)
284 return localpath(n2)
285 if os.path.isabs(n1):
285 if os.path.isabs(n1):
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
286 if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]:
287 return os.path.join(root, localpath(n2))
287 return os.path.join(root, localpath(n2))
288 n2 = '/'.join((pconvert(root), n2))
288 n2 = '/'.join((pconvert(root), n2))
289 a, b = splitpath(n1), n2.split('/')
289 a, b = splitpath(n1), n2.split('/')
290 a.reverse()
290 a.reverse()
291 b.reverse()
291 b.reverse()
292 while a and b and a[-1] == b[-1]:
292 while a and b and a[-1] == b[-1]:
293 a.pop()
293 a.pop()
294 b.pop()
294 b.pop()
295 b.reverse()
295 b.reverse()
296 return os.sep.join((['..'] * len(a)) + b) or '.'
296 return os.sep.join((['..'] * len(a)) + b) or '.'
297
297
298 _hgexecutable = None
298 _hgexecutable = None
299
299
300 def main_is_frozen():
300 def main_is_frozen():
301 """return True if we are a frozen executable.
301 """return True if we are a frozen executable.
302
302
303 The code supports py2exe (most common, Windows only) and tools/freeze
303 The code supports py2exe (most common, Windows only) and tools/freeze
304 (portable, not much used).
304 (portable, not much used).
305 """
305 """
306 return (hasattr(sys, "frozen") or # new py2exe
306 return (hasattr(sys, "frozen") or # new py2exe
307 hasattr(sys, "importers") or # old py2exe
307 hasattr(sys, "importers") or # old py2exe
308 imp.is_frozen("__main__")) # tools/freeze
308 imp.is_frozen("__main__")) # tools/freeze
309
309
310 def hgexecutable():
310 def hgexecutable():
311 """return location of the 'hg' executable.
311 """return location of the 'hg' executable.
312
312
313 Defaults to $HG or 'hg' in the search path.
313 Defaults to $HG or 'hg' in the search path.
314 """
314 """
315 if _hgexecutable is None:
315 if _hgexecutable is None:
316 hg = os.environ.get('HG')
316 hg = os.environ.get('HG')
317 if hg:
317 if hg:
318 set_hgexecutable(hg)
318 set_hgexecutable(hg)
319 elif main_is_frozen():
319 elif main_is_frozen():
320 set_hgexecutable(sys.executable)
320 set_hgexecutable(sys.executable)
321 else:
321 else:
322 exe = find_exe('hg') or os.path.basename(sys.argv[0])
322 exe = find_exe('hg') or os.path.basename(sys.argv[0])
323 set_hgexecutable(exe)
323 set_hgexecutable(exe)
324 return _hgexecutable
324 return _hgexecutable
325
325
326 def set_hgexecutable(path):
326 def set_hgexecutable(path):
327 """set location of the 'hg' executable"""
327 """set location of the 'hg' executable"""
328 global _hgexecutable
328 global _hgexecutable
329 _hgexecutable = path
329 _hgexecutable = path
330
330
331 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
331 def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
332 '''enhanced shell command execution.
332 '''enhanced shell command execution.
333 run with environment maybe modified, maybe in different dir.
333 run with environment maybe modified, maybe in different dir.
334
334
335 if command fails and onerr is None, return status. if ui object,
335 if command fails and onerr is None, return status. if ui object,
336 print error message and return status, else raise onerr object as
336 print error message and return status, else raise onerr object as
337 exception.
337 exception.
338
338
339 if out is specified, it is assumed to be a file-like object that has a
339 if out is specified, it is assumed to be a file-like object that has a
340 write() method. stdout and stderr will be redirected to out.'''
340 write() method. stdout and stderr will be redirected to out.'''
341 try:
341 try:
342 sys.stdout.flush()
342 sys.stdout.flush()
343 except Exception:
343 except Exception:
344 pass
344 pass
345 def py2shell(val):
345 def py2shell(val):
346 'convert python object into string that is useful to shell'
346 'convert python object into string that is useful to shell'
347 if val is None or val is False:
347 if val is None or val is False:
348 return '0'
348 return '0'
349 if val is True:
349 if val is True:
350 return '1'
350 return '1'
351 return str(val)
351 return str(val)
352 origcmd = cmd
352 origcmd = cmd
353 cmd = quotecommand(cmd)
353 cmd = quotecommand(cmd)
354 env = dict(os.environ)
354 env = dict(os.environ)
355 env.update((k, py2shell(v)) for k, v in environ.iteritems())
355 env.update((k, py2shell(v)) for k, v in environ.iteritems())
356 env['HG'] = hgexecutable()
356 env['HG'] = hgexecutable()
357 if out is None:
357 if out is None:
358 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
358 rc = subprocess.call(cmd, shell=True, close_fds=closefds,
359 env=env, cwd=cwd)
359 env=env, cwd=cwd)
360 else:
360 else:
361 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
361 proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
362 env=env, cwd=cwd, stdout=subprocess.PIPE,
362 env=env, cwd=cwd, stdout=subprocess.PIPE,
363 stderr=subprocess.STDOUT)
363 stderr=subprocess.STDOUT)
364 for line in proc.stdout:
364 for line in proc.stdout:
365 out.write(line)
365 out.write(line)
366 proc.wait()
366 proc.wait()
367 rc = proc.returncode
367 rc = proc.returncode
368 if sys.platform == 'OpenVMS' and rc & 1:
368 if sys.platform == 'OpenVMS' and rc & 1:
369 rc = 0
369 rc = 0
370 if rc and onerr:
370 if rc and onerr:
371 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
371 errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
372 explain_exit(rc)[0])
372 explain_exit(rc)[0])
373 if errprefix:
373 if errprefix:
374 errmsg = '%s: %s' % (errprefix, errmsg)
374 errmsg = '%s: %s' % (errprefix, errmsg)
375 try:
375 try:
376 onerr.warn(errmsg + '\n')
376 onerr.warn(errmsg + '\n')
377 except AttributeError:
377 except AttributeError:
378 raise onerr(errmsg)
378 raise onerr(errmsg)
379 return rc
379 return rc
380
380
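# Sketch of system(): run a shell command with an augmented environment and
# inspect its exit status. The command and variable are illustrative only;
# passing onerr would instead raise or warn on failure.
rc = system('echo hello', environ={'GREETING': 'hi'})
if rc:
    pass  # non-zero exit status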
381 def checksignature(func):
381 def checksignature(func):
382 '''wrap a function with code to check for calling errors'''
382 '''wrap a function with code to check for calling errors'''
383 def check(*args, **kwargs):
383 def check(*args, **kwargs):
384 try:
384 try:
385 return func(*args, **kwargs)
385 return func(*args, **kwargs)
386 except TypeError:
386 except TypeError:
387 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
387 if len(traceback.extract_tb(sys.exc_info()[2])) == 1:
388 raise error.SignatureError
388 raise error.SignatureError
389 raise
389 raise
390
390
391 return check
391 return check
392
392
393 def makedir(path, notindexed):
393 def makedir(path, notindexed):
394 os.mkdir(path)
394 os.mkdir(path)
395
395
396 def unlinkpath(f):
396 def unlinkpath(f):
397 """unlink and remove the directory if it is empty"""
397 """unlink and remove the directory if it is empty"""
398 os.unlink(f)
398 os.unlink(f)
399 # try removing directories that might now be empty
399 # try removing directories that might now be empty
400 try:
400 try:
401 os.removedirs(os.path.dirname(f))
401 os.removedirs(os.path.dirname(f))
402 except OSError:
402 except OSError:
403 pass
403 pass
404
404
405 def copyfile(src, dest):
405 def copyfile(src, dest):
406 "copy a file, preserving mode and atime/mtime"
406 "copy a file, preserving mode and atime/mtime"
407 if os.path.islink(src):
407 if os.path.islink(src):
408 try:
408 try:
409 os.unlink(dest)
409 os.unlink(dest)
410 except:
410 except:
411 pass
411 pass
412 os.symlink(os.readlink(src), dest)
412 os.symlink(os.readlink(src), dest)
413 else:
413 else:
414 try:
414 try:
415 shutil.copyfile(src, dest)
415 shutil.copyfile(src, dest)
416 shutil.copymode(src, dest)
416 shutil.copymode(src, dest)
417 except shutil.Error, inst:
417 except shutil.Error, inst:
418 raise Abort(str(inst))
418 raise Abort(str(inst))
419
419
420 def copyfiles(src, dst, hardlink=None):
420 def copyfiles(src, dst, hardlink=None):
421 """Copy a directory tree using hardlinks if possible"""
421 """Copy a directory tree using hardlinks if possible"""
422
422
423 if hardlink is None:
423 if hardlink is None:
424 hardlink = (os.stat(src).st_dev ==
424 hardlink = (os.stat(src).st_dev ==
425 os.stat(os.path.dirname(dst)).st_dev)
425 os.stat(os.path.dirname(dst)).st_dev)
426
426
427 num = 0
427 num = 0
428 if os.path.isdir(src):
428 if os.path.isdir(src):
429 os.mkdir(dst)
429 os.mkdir(dst)
430 for name, kind in osutil.listdir(src):
430 for name, kind in osutil.listdir(src):
431 srcname = os.path.join(src, name)
431 srcname = os.path.join(src, name)
432 dstname = os.path.join(dst, name)
432 dstname = os.path.join(dst, name)
433 hardlink, n = copyfiles(srcname, dstname, hardlink)
433 hardlink, n = copyfiles(srcname, dstname, hardlink)
434 num += n
434 num += n
435 else:
435 else:
436 if hardlink:
436 if hardlink:
437 try:
437 try:
438 os_link(src, dst)
438 os_link(src, dst)
439 except (IOError, OSError):
439 except (IOError, OSError):
440 hardlink = False
440 hardlink = False
441 shutil.copy(src, dst)
441 shutil.copy(src, dst)
442 else:
442 else:
443 shutil.copy(src, dst)
443 shutil.copy(src, dst)
444 num += 1
444 num += 1
445
445
446 return hardlink, num
446 return hardlink, num
447
447
448 def checkfilename(f):
449 '''Check that the filename f is an acceptable filename for a tracked file'''
450 if '\r' in f or '\n' in f:
451 raise Abort(_("'\\n' and '\\r' disallowed in filenames: %r") % f)
452
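# The checkfilename helper removed above rejects filenames containing newline
# or carriage-return characters. A minimal usage sketch of that validation
# (the filename is illustrative only):
try:
    checkfilename('foo\nbar.txt')
except Abort:
    pass  # '\n' and '\r' are disallowed in tracked filenames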
453 _windows_reserved_filenames = '''con prn aux nul
448 _windows_reserved_filenames = '''con prn aux nul
454 com1 com2 com3 com4 com5 com6 com7 com8 com9
449 com1 com2 com3 com4 com5 com6 com7 com8 com9
455 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
450 lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split()
456 _windows_reserved_chars = ':*?"<>|'
451 _windows_reserved_chars = ':*?"<>|'
457 def checkwinfilename(path):
452 def checkwinfilename(path):
458 '''Check that the base-relative path is a valid filename on Windows.
453 '''Check that the base-relative path is a valid filename on Windows.
459 Returns None if the path is ok, or a UI string describing the problem.
454 Returns None if the path is ok, or a UI string describing the problem.
460
455
461 >>> checkwinfilename("just/a/normal/path")
456 >>> checkwinfilename("just/a/normal/path")
462 >>> checkwinfilename("foo/bar/con.xml")
457 >>> checkwinfilename("foo/bar/con.xml")
463 "filename contains 'con', which is reserved on Windows"
458 "filename contains 'con', which is reserved on Windows"
464 >>> checkwinfilename("foo/con.xml/bar")
459 >>> checkwinfilename("foo/con.xml/bar")
465 "filename contains 'con', which is reserved on Windows"
460 "filename contains 'con', which is reserved on Windows"
466 >>> checkwinfilename("foo/bar/xml.con")
461 >>> checkwinfilename("foo/bar/xml.con")
467 >>> checkwinfilename("foo/bar/AUX/bla.txt")
462 >>> checkwinfilename("foo/bar/AUX/bla.txt")
468 "filename contains 'AUX', which is reserved on Windows"
463 "filename contains 'AUX', which is reserved on Windows"
469 >>> checkwinfilename("foo/bar/bla:.txt")
464 >>> checkwinfilename("foo/bar/bla:.txt")
470 "filename contains ':', which is reserved on Windows"
465 "filename contains ':', which is reserved on Windows"
471 >>> checkwinfilename("foo/bar/b\07la.txt")
466 >>> checkwinfilename("foo/bar/b\07la.txt")
472 "filename contains '\\\\x07', which is invalid on Windows"
467 "filename contains '\\\\x07', which is invalid on Windows"
473 >>> checkwinfilename("foo/bar/bla ")
468 >>> checkwinfilename("foo/bar/bla ")
474 "filename ends with ' ', which is not allowed on Windows"
469 "filename ends with ' ', which is not allowed on Windows"
475 '''
470 '''
476 for n in path.replace('\\', '/').split('/'):
471 for n in path.replace('\\', '/').split('/'):
477 if not n:
472 if not n:
478 continue
473 continue
479 for c in n:
474 for c in n:
480 if c in _windows_reserved_chars:
475 if c in _windows_reserved_chars:
481 return _("filename contains '%s', which is reserved "
476 return _("filename contains '%s', which is reserved "
482 "on Windows") % c
477 "on Windows") % c
483 if ord(c) <= 31:
478 if ord(c) <= 31:
484 return _("filename contains %r, which is invalid "
479 return _("filename contains %r, which is invalid "
485 "on Windows") % c
480 "on Windows") % c
486 base = n.split('.')[0]
481 base = n.split('.')[0]
487 if base and base.lower() in _windows_reserved_filenames:
482 if base and base.lower() in _windows_reserved_filenames:
488 return _("filename contains '%s', which is reserved "
483 return _("filename contains '%s', which is reserved "
489 "on Windows") % base
484 "on Windows") % base
490 t = n[-1]
485 t = n[-1]
491 if t in '. ':
486 if t in '. ':
492 return _("filename ends with '%s', which is not allowed "
487 return _("filename ends with '%s', which is not allowed "
493 "on Windows") % t
488 "on Windows") % t
494
489
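# Usage sketch for checkwinfilename: a None return means the path is
# acceptable on Windows, otherwise a message string describes the problem.
# The path below is illustrative only.
problem = checkwinfilename('foo/aux.txt')
if problem is not None:
    print problem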
495 def lookup_reg(key, name=None, scope=None):
490 def lookup_reg(key, name=None, scope=None):
496 return None
491 return None
497
492
498 def hidewindow():
493 def hidewindow():
499 """Hide current shell window.
494 """Hide current shell window.
500
495
501 Used to hide the window opened when starting asynchronous
496 Used to hide the window opened when starting asynchronous
502 child process under Windows, unneeded on other systems.
497 child process under Windows, unneeded on other systems.
503 """
498 """
504 pass
499 pass
505
500
506 if os.name == 'nt':
501 if os.name == 'nt':
507 checkosfilename = checkwinfilename
502 checkosfilename = checkwinfilename
508 from windows import *
503 from windows import *
509 else:
504 else:
510 from posix import *
505 from posix import *
511
506
512 def makelock(info, pathname):
507 def makelock(info, pathname):
513 try:
508 try:
514 return os.symlink(info, pathname)
509 return os.symlink(info, pathname)
515 except OSError, why:
510 except OSError, why:
516 if why.errno == errno.EEXIST:
511 if why.errno == errno.EEXIST:
517 raise
512 raise
518 except AttributeError: # no symlink in os
513 except AttributeError: # no symlink in os
519 pass
514 pass
520
515
521 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
516 ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL)
522 os.write(ld, info)
517 os.write(ld, info)
523 os.close(ld)
518 os.close(ld)
524
519
525 def readlock(pathname):
520 def readlock(pathname):
526 try:
521 try:
527 return os.readlink(pathname)
522 return os.readlink(pathname)
528 except OSError, why:
523 except OSError, why:
529 if why.errno not in (errno.EINVAL, errno.ENOSYS):
524 if why.errno not in (errno.EINVAL, errno.ENOSYS):
530 raise
525 raise
531 except AttributeError: # no symlink in os
526 except AttributeError: # no symlink in os
532 pass
527 pass
533 fp = posixfile(pathname)
528 fp = posixfile(pathname)
534 r = fp.read()
529 r = fp.read()
535 fp.close()
530 fp.close()
536 return r
531 return r
537
532
538 def fstat(fp):
533 def fstat(fp):
539 '''stat file object that may not have fileno method.'''
534 '''stat file object that may not have fileno method.'''
540 try:
535 try:
541 return os.fstat(fp.fileno())
536 return os.fstat(fp.fileno())
542 except AttributeError:
537 except AttributeError:
543 return os.stat(fp.name)
538 return os.stat(fp.name)
544
539
545 # File system features
540 # File system features
546
541
547 def checkcase(path):
542 def checkcase(path):
548 """
543 """
549 Check whether the given path is on a case-sensitive filesystem
544 Check whether the given path is on a case-sensitive filesystem
550
545
551 Requires a path (like /foo/.hg) ending with a foldable final
546 Requires a path (like /foo/.hg) ending with a foldable final
552 directory component.
547 directory component.
553 """
548 """
554 s1 = os.stat(path)
549 s1 = os.stat(path)
555 d, b = os.path.split(path)
550 d, b = os.path.split(path)
556 p2 = os.path.join(d, b.upper())
551 p2 = os.path.join(d, b.upper())
557 if path == p2:
552 if path == p2:
558 p2 = os.path.join(d, b.lower())
553 p2 = os.path.join(d, b.lower())
559 try:
554 try:
560 s2 = os.stat(p2)
555 s2 = os.stat(p2)
561 if s2 == s1:
556 if s2 == s1:
562 return False
557 return False
563 return True
558 return True
564 except:
559 except:
565 return True
560 return True
566
561
567 _fspathcache = {}
562 _fspathcache = {}
568 def fspath(name, root):
563 def fspath(name, root):
569 '''Get name in the case stored in the filesystem
564 '''Get name in the case stored in the filesystem
570
565
571 The name is either relative to root, or it is an absolute path starting
566 The name is either relative to root, or it is an absolute path starting
572 with root. Note that this function is unnecessary, and should not be
567 with root. Note that this function is unnecessary, and should not be
573 called, for case-sensitive filesystems (simply because it's expensive).
568 called, for case-sensitive filesystems (simply because it's expensive).
574 '''
569 '''
575 # If name is absolute, make it relative
570 # If name is absolute, make it relative
576 if name.lower().startswith(root.lower()):
571 if name.lower().startswith(root.lower()):
577 l = len(root)
572 l = len(root)
578 if name[l] == os.sep or name[l] == os.altsep:
573 if name[l] == os.sep or name[l] == os.altsep:
579 l = l + 1
574 l = l + 1
580 name = name[l:]
575 name = name[l:]
581
576
582 if not os.path.lexists(os.path.join(root, name)):
577 if not os.path.lexists(os.path.join(root, name)):
583 return None
578 return None
584
579
585 seps = os.sep
580 seps = os.sep
586 if os.altsep:
581 if os.altsep:
587 seps = seps + os.altsep
582 seps = seps + os.altsep
588 # Protect backslashes. This gets silly very quickly.
583 # Protect backslashes. This gets silly very quickly.
589 seps.replace('\\','\\\\')
584 seps.replace('\\','\\\\')
590 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
585 pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
591 dir = os.path.normcase(os.path.normpath(root))
586 dir = os.path.normcase(os.path.normpath(root))
592 result = []
587 result = []
593 for part, sep in pattern.findall(name):
588 for part, sep in pattern.findall(name):
594 if sep:
589 if sep:
595 result.append(sep)
590 result.append(sep)
596 continue
591 continue
597
592
598 if dir not in _fspathcache:
593 if dir not in _fspathcache:
599 _fspathcache[dir] = os.listdir(dir)
594 _fspathcache[dir] = os.listdir(dir)
600 contents = _fspathcache[dir]
595 contents = _fspathcache[dir]
601
596
602 lpart = part.lower()
597 lpart = part.lower()
603 lenp = len(part)
598 lenp = len(part)
604 for n in contents:
599 for n in contents:
605 if lenp == len(n) and n.lower() == lpart:
600 if lenp == len(n) and n.lower() == lpart:
606 result.append(n)
601 result.append(n)
607 break
602 break
608 else:
603 else:
609 # Cannot happen, as the file exists!
604 # Cannot happen, as the file exists!
610 result.append(part)
605 result.append(part)
611 dir = os.path.join(dir, lpart)
606 dir = os.path.join(dir, lpart)
612
607
613 return ''.join(result)
608 return ''.join(result)
614
609
615 def checknlink(testfile):
610 def checknlink(testfile):
616 '''check whether hardlink count reporting works properly'''
611 '''check whether hardlink count reporting works properly'''
617
612
618 # testfile may be open, so we need a separate file for checking to
613 # testfile may be open, so we need a separate file for checking to
619 # work around issue2543 (or testfile may get lost on Samba shares)
614 # work around issue2543 (or testfile may get lost on Samba shares)
620 f1 = testfile + ".hgtmp1"
615 f1 = testfile + ".hgtmp1"
621 if os.path.lexists(f1):
616 if os.path.lexists(f1):
622 return False
617 return False
623 try:
618 try:
624 posixfile(f1, 'w').close()
619 posixfile(f1, 'w').close()
625 except IOError:
620 except IOError:
626 return False
621 return False
627
622
628 f2 = testfile + ".hgtmp2"
623 f2 = testfile + ".hgtmp2"
629 fd = None
624 fd = None
630 try:
625 try:
631 try:
626 try:
632 os_link(f1, f2)
627 os_link(f1, f2)
633 except OSError:
628 except OSError:
634 return False
629 return False
635
630
636 # nlinks() may behave differently for files on Windows shares if
631 # nlinks() may behave differently for files on Windows shares if
637 # the file is open.
632 # the file is open.
638 fd = posixfile(f2)
633 fd = posixfile(f2)
639 return nlinks(f2) > 1
634 return nlinks(f2) > 1
640 finally:
635 finally:
641 if fd is not None:
636 if fd is not None:
642 fd.close()
637 fd.close()
643 for f in (f1, f2):
638 for f in (f1, f2):
644 try:
639 try:
645 os.unlink(f)
640 os.unlink(f)
646 except OSError:
641 except OSError:
647 pass
642 pass
648
643
649 return False
644 return False
650
645
651 def endswithsep(path):
646 def endswithsep(path):
652 '''Check path ends with os.sep or os.altsep.'''
647 '''Check path ends with os.sep or os.altsep.'''
653 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
648 return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep)
654
649
655 def splitpath(path):
650 def splitpath(path):
656 '''Split path by os.sep.
651 '''Split path by os.sep.
657 Note that this function does not use os.altsep because it is
652 Note that this function does not use os.altsep because it is
658 meant as a simple alternative to "xxx.split(os.sep)".
653 meant as a simple alternative to "xxx.split(os.sep)".
659 It is recommended to use os.path.normpath() before using this
654 It is recommended to use os.path.normpath() before using this
660 function if needed.'''
655 function if needed.'''
661 return path.split(os.sep)
656 return path.split(os.sep)
662
657
663 def gui():
658 def gui():
664 '''Are we running in a GUI?'''
659 '''Are we running in a GUI?'''
665 if sys.platform == 'darwin':
660 if sys.platform == 'darwin':
666 if 'SSH_CONNECTION' in os.environ:
661 if 'SSH_CONNECTION' in os.environ:
667 # handle SSH access to a box where the user is logged in
662 # handle SSH access to a box where the user is logged in
668 return False
663 return False
669 elif getattr(osutil, 'isgui', None):
664 elif getattr(osutil, 'isgui', None):
670 # check if a CoreGraphics session is available
665 # check if a CoreGraphics session is available
671 return osutil.isgui()
666 return osutil.isgui()
672 else:
667 else:
673 # pure build; use a safe default
668 # pure build; use a safe default
674 return True
669 return True
675 else:
670 else:
676 return os.name == "nt" or os.environ.get("DISPLAY")
671 return os.name == "nt" or os.environ.get("DISPLAY")
677
672
678 def mktempcopy(name, emptyok=False, createmode=None):
673 def mktempcopy(name, emptyok=False, createmode=None):
679 """Create a temporary file with the same contents from name
674 """Create a temporary file with the same contents from name
680
675
681 The permission bits are copied from the original file.
676 The permission bits are copied from the original file.
682
677
683 If the temporary file is going to be truncated immediately, you
678 If the temporary file is going to be truncated immediately, you
684 can use emptyok=True as an optimization.
679 can use emptyok=True as an optimization.
685
680
686 Returns the name of the temporary file.
681 Returns the name of the temporary file.
687 """
682 """
688 d, fn = os.path.split(name)
683 d, fn = os.path.split(name)
689 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
684 fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d)
690 os.close(fd)
685 os.close(fd)
691 # Temporary files are created with mode 0600, which is usually not
686 # Temporary files are created with mode 0600, which is usually not
692 # what we want. If the original file already exists, just copy
687 # what we want. If the original file already exists, just copy
693 # its mode. Otherwise, manually obey umask.
688 # its mode. Otherwise, manually obey umask.
694 try:
689 try:
695 st_mode = os.lstat(name).st_mode & 0777
690 st_mode = os.lstat(name).st_mode & 0777
696 except OSError, inst:
691 except OSError, inst:
697 if inst.errno != errno.ENOENT:
692 if inst.errno != errno.ENOENT:
698 raise
693 raise
699 st_mode = createmode
694 st_mode = createmode
700 if st_mode is None:
695 if st_mode is None:
701 st_mode = ~umask
696 st_mode = ~umask
702 st_mode &= 0666
697 st_mode &= 0666
703 os.chmod(temp, st_mode)
698 os.chmod(temp, st_mode)
704 if emptyok:
699 if emptyok:
705 return temp
700 return temp
706 try:
701 try:
707 try:
702 try:
708 ifp = posixfile(name, "rb")
703 ifp = posixfile(name, "rb")
709 except IOError, inst:
704 except IOError, inst:
710 if inst.errno == errno.ENOENT:
705 if inst.errno == errno.ENOENT:
711 return temp
706 return temp
712 if not getattr(inst, 'filename', None):
707 if not getattr(inst, 'filename', None):
713 inst.filename = name
708 inst.filename = name
714 raise
709 raise
715 ofp = posixfile(temp, "wb")
710 ofp = posixfile(temp, "wb")
716 for chunk in filechunkiter(ifp):
711 for chunk in filechunkiter(ifp):
717 ofp.write(chunk)
712 ofp.write(chunk)
718 ifp.close()
713 ifp.close()
719 ofp.close()
714 ofp.close()
720 except:
715 except:
721 try: os.unlink(temp)
716 try: os.unlink(temp)
722 except: pass
717 except: pass
723 raise
718 raise
724 return temp
719 return temp
725
720
726 class atomictempfile(object):
721 class atomictempfile(object):
727 """file-like object that atomically updates a file
722 """file-like object that atomically updates a file
728
723
729 All writes will be redirected to a temporary copy of the original
724 All writes will be redirected to a temporary copy of the original
730 file. When rename is called, the copy is renamed to the original
725 file. When rename is called, the copy is renamed to the original
731 name, making the changes visible.
726 name, making the changes visible.
732 """
727 """
733 def __init__(self, name, mode='w+b', createmode=None):
728 def __init__(self, name, mode='w+b', createmode=None):
734 self.__name = name
729 self.__name = name
735 self._fp = None
730 self._fp = None
736 self.temp = mktempcopy(name, emptyok=('w' in mode),
731 self.temp = mktempcopy(name, emptyok=('w' in mode),
737 createmode=createmode)
732 createmode=createmode)
738 self._fp = posixfile(self.temp, mode)
733 self._fp = posixfile(self.temp, mode)
739
734
740 def __getattr__(self, name):
735 def __getattr__(self, name):
741 return getattr(self._fp, name)
736 return getattr(self._fp, name)
742
737
743 def rename(self):
738 def rename(self):
744 if not self._fp.closed:
739 if not self._fp.closed:
745 self._fp.close()
740 self._fp.close()
746 rename(self.temp, localpath(self.__name))
741 rename(self.temp, localpath(self.__name))
747
742
748 def close(self):
743 def close(self):
749 if not self._fp:
744 if not self._fp:
750 return
745 return
751 if not self._fp.closed:
746 if not self._fp.closed:
752 try:
747 try:
753 os.unlink(self.temp)
748 os.unlink(self.temp)
754 except: pass
749 except: pass
755 self._fp.close()
750 self._fp.close()
756
751
757 def __del__(self):
752 def __del__(self):
758 self.close()
753 self.close()
759
754
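# Usage sketch for atomictempfile: writes go to a temporary copy of the file;
# rename() publishes them atomically, close() discards them. The file name
# and contents are illustrative only.
f = atomictempfile('somefile')
f.write('all or nothing\n')
f.rename()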
760 def makedirs(name, mode=None):
755 def makedirs(name, mode=None):
761 """recursive directory creation with parent mode inheritance"""
756 """recursive directory creation with parent mode inheritance"""
762 parent = os.path.abspath(os.path.dirname(name))
757 parent = os.path.abspath(os.path.dirname(name))
763 try:
758 try:
764 os.mkdir(name)
759 os.mkdir(name)
765 if mode is not None:
760 if mode is not None:
766 os.chmod(name, mode)
761 os.chmod(name, mode)
767 return
762 return
768 except OSError, err:
763 except OSError, err:
769 if err.errno == errno.EEXIST:
764 if err.errno == errno.EEXIST:
770 return
765 return
771 if not name or parent == name or err.errno != errno.ENOENT:
766 if not name or parent == name or err.errno != errno.ENOENT:
772 raise
767 raise
773 makedirs(parent, mode)
768 makedirs(parent, mode)
774 makedirs(name, mode)
769 makedirs(name, mode)
775
770
776 class chunkbuffer(object):
771 class chunkbuffer(object):
777 """Allow arbitrary sized chunks of data to be efficiently read from an
772 """Allow arbitrary sized chunks of data to be efficiently read from an
778 iterator over chunks of arbitrary size."""
773 iterator over chunks of arbitrary size."""
779
774
780 def __init__(self, in_iter):
775 def __init__(self, in_iter):
781 """in_iter is the iterator that's iterating over the input chunks.
776 """in_iter is the iterator that's iterating over the input chunks.
782 targetsize is how big a buffer to try to maintain."""
777 targetsize is how big a buffer to try to maintain."""
783 def splitbig(chunks):
778 def splitbig(chunks):
784 for chunk in chunks:
779 for chunk in chunks:
785 if len(chunk) > 2**20:
780 if len(chunk) > 2**20:
786 pos = 0
781 pos = 0
787 while pos < len(chunk):
782 while pos < len(chunk):
788 end = pos + 2 ** 18
783 end = pos + 2 ** 18
789 yield chunk[pos:end]
784 yield chunk[pos:end]
790 pos = end
785 pos = end
791 else:
786 else:
792 yield chunk
787 yield chunk
793 self.iter = splitbig(in_iter)
788 self.iter = splitbig(in_iter)
794 self._queue = []
789 self._queue = []
795
790
796 def read(self, l):
791 def read(self, l):
797 """Read L bytes of data from the iterator of chunks of data.
792 """Read L bytes of data from the iterator of chunks of data.
798 Returns less than L bytes if the iterator runs dry."""
793 Returns less than L bytes if the iterator runs dry."""
799 left = l
794 left = l
800 buf = ''
795 buf = ''
801 queue = self._queue
796 queue = self._queue
802 while left > 0:
797 while left > 0:
803 # refill the queue
798 # refill the queue
804 if not queue:
799 if not queue:
805 target = 2**18
800 target = 2**18
806 for chunk in self.iter:
801 for chunk in self.iter:
807 queue.append(chunk)
802 queue.append(chunk)
808 target -= len(chunk)
803 target -= len(chunk)
809 if target <= 0:
804 if target <= 0:
810 break
805 break
811 if not queue:
806 if not queue:
812 break
807 break
813
808
814 chunk = queue.pop(0)
809 chunk = queue.pop(0)
815 left -= len(chunk)
810 left -= len(chunk)
816 if left < 0:
811 if left < 0:
817 queue.insert(0, chunk[left:])
812 queue.insert(0, chunk[left:])
818 buf += chunk[:left]
813 buf += chunk[:left]
819 else:
814 else:
820 buf += chunk
815 buf += chunk
821
816
822 return buf
817 return buf
823
818
824 def filechunkiter(f, size=65536, limit=None):
819 def filechunkiter(f, size=65536, limit=None):
825 """Create a generator that produces the data in the file size
820 """Create a generator that produces the data in the file size
826 (default 65536) bytes at a time, up to optional limit (default is
821 (default 65536) bytes at a time, up to optional limit (default is
827 to read all data). Chunks may be less than size bytes if the
822 to read all data). Chunks may be less than size bytes if the
828 chunk is the last chunk in the file, or the file is a socket or
823 chunk is the last chunk in the file, or the file is a socket or
829 some other type of file that sometimes reads less data than is
824 some other type of file that sometimes reads less data than is
830 requested."""
825 requested."""
831 assert size >= 0
826 assert size >= 0
832 assert limit is None or limit >= 0
827 assert limit is None or limit >= 0
833 while True:
828 while True:
834 if limit is None:
829 if limit is None:
835 nbytes = size
830 nbytes = size
836 else:
831 else:
837 nbytes = min(limit, size)
832 nbytes = min(limit, size)
838 s = nbytes and f.read(nbytes)
833 s = nbytes and f.read(nbytes)
839 if not s:
834 if not s:
840 break
835 break
841 if limit:
836 if limit:
842 limit -= len(s)
837 limit -= len(s)
843 yield s
838 yield s
844
839
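# Sketch: stream a file in bounded chunks with filechunkiter, mirroring how
# mktempcopy copies file contents above. The paths are illustrative only.
src = posixfile('input.bin', 'rb')
dst = posixfile('output.bin', 'wb')
for chunk in filechunkiter(src):
    dst.write(chunk)
src.close()
dst.close()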
845 def makedate():
840 def makedate():
846 lt = time.localtime()
841 lt = time.localtime()
847 if lt[8] == 1 and time.daylight:
842 if lt[8] == 1 and time.daylight:
848 tz = time.altzone
843 tz = time.altzone
849 else:
844 else:
850 tz = time.timezone
845 tz = time.timezone
851 t = time.mktime(lt)
846 t = time.mktime(lt)
852 if t < 0:
847 if t < 0:
853 hint = _("check your clock")
848 hint = _("check your clock")
854 raise Abort(_("negative timestamp: %d") % t, hint=hint)
849 raise Abort(_("negative timestamp: %d") % t, hint=hint)
855 return t, tz
850 return t, tz
856
851
857 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
852 def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
858 """represent a (unixtime, offset) tuple as a localized time.
853 """represent a (unixtime, offset) tuple as a localized time.
859 unixtime is seconds since the epoch, and offset is the time zone's
854 unixtime is seconds since the epoch, and offset is the time zone's
860 number of seconds away from UTC. "%1" and "%2" in the format are
855 number of seconds away from UTC. "%1" and "%2" in the format are
861 replaced with the offset's hours (signed) and minutes."""
856 replaced with the offset's hours (signed) and minutes."""
862 t, tz = date or makedate()
857 t, tz = date or makedate()
863 if t < 0:
858 if t < 0:
864 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
859 t = 0 # time.gmtime(lt) fails on Windows for lt < -43200
865 tz = 0
860 tz = 0
866 if "%1" in format or "%2" in format:
861 if "%1" in format or "%2" in format:
867 sign = (tz > 0) and "-" or "+"
862 sign = (tz > 0) and "-" or "+"
868 minutes = abs(tz) // 60
863 minutes = abs(tz) // 60
869 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
864 format = format.replace("%1", "%c%02d" % (sign, minutes // 60))
870 format = format.replace("%2", "%02d" % (minutes % 60))
865 format = format.replace("%2", "%02d" % (minutes % 60))
871 s = time.strftime(format, time.gmtime(float(t) - tz))
866 s = time.strftime(format, time.gmtime(float(t) - tz))
872 return s
867 return s
873
868
874 def shortdate(date=None):
869 def shortdate(date=None):
875 """turn (timestamp, tzoff) tuple into iso 8631 date."""
870 """turn (timestamp, tzoff) tuple into iso 8631 date."""
876 return datestr(date, format='%Y-%m-%d')
871 return datestr(date, format='%Y-%m-%d')
877
872
878 def strdate(string, format, defaults=[]):
873 def strdate(string, format, defaults=[]):
879 """parse a localized time string and return a (unixtime, offset) tuple.
874 """parse a localized time string and return a (unixtime, offset) tuple.
880 if the string cannot be parsed, ValueError is raised."""
875 if the string cannot be parsed, ValueError is raised."""
881 def timezone(string):
876 def timezone(string):
882 tz = string.split()[-1]
877 tz = string.split()[-1]
883 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
878 if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit():
884 sign = (tz[0] == "+") and 1 or -1
879 sign = (tz[0] == "+") and 1 or -1
885 hours = int(tz[1:3])
880 hours = int(tz[1:3])
886 minutes = int(tz[3:5])
881 minutes = int(tz[3:5])
887 return -sign * (hours * 60 + minutes) * 60
882 return -sign * (hours * 60 + minutes) * 60
888 if tz == "GMT" or tz == "UTC":
883 if tz == "GMT" or tz == "UTC":
889 return 0
884 return 0
890 return None
885 return None
891
886
892 # NOTE: unixtime = localunixtime + offset
887 # NOTE: unixtime = localunixtime + offset
893 offset, date = timezone(string), string
888 offset, date = timezone(string), string
894 if offset is not None:
889 if offset is not None:
895 date = " ".join(string.split()[:-1])
890 date = " ".join(string.split()[:-1])
896
891
897 # add missing elements from defaults
892 # add missing elements from defaults
898 usenow = False # default to using biased defaults
893 usenow = False # default to using biased defaults
899 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
894 for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity
900 found = [True for p in part if ("%"+p) in format]
895 found = [True for p in part if ("%"+p) in format]
901 if not found:
896 if not found:
902 date += "@" + defaults[part][usenow]
897 date += "@" + defaults[part][usenow]
903 format += "@%" + part[0]
898 format += "@%" + part[0]
904 else:
899 else:
905 # We've found a specific time element, less specific time
900 # We've found a specific time element, less specific time
906 # elements are relative to today
901 # elements are relative to today
907 usenow = True
902 usenow = True
908
903
909 timetuple = time.strptime(date, format)
904 timetuple = time.strptime(date, format)
910 localunixtime = int(calendar.timegm(timetuple))
905 localunixtime = int(calendar.timegm(timetuple))
911 if offset is None:
906 if offset is None:
912 # local timezone
907 # local timezone
913 unixtime = int(time.mktime(timetuple))
908 unixtime = int(time.mktime(timetuple))
914 offset = unixtime - localunixtime
909 offset = unixtime - localunixtime
915 else:
910 else:
916 unixtime = localunixtime + offset
911 unixtime = localunixtime + offset
917 return unixtime, offset
912 return unixtime, offset
918
913
919 def parsedate(date, formats=None, bias={}):
914 def parsedate(date, formats=None, bias={}):
920 """parse a localized date/time and return a (unixtime, offset) tuple.
915 """parse a localized date/time and return a (unixtime, offset) tuple.
921
916
922 The date may be a "unixtime offset" string or in one of the specified
917 The date may be a "unixtime offset" string or in one of the specified
923 formats. If the date already is a (unixtime, offset) tuple, it is returned.
918 formats. If the date already is a (unixtime, offset) tuple, it is returned.
924 """
919 """
925 if not date:
920 if not date:
926 return 0, 0
921 return 0, 0
927 if isinstance(date, tuple) and len(date) == 2:
922 if isinstance(date, tuple) and len(date) == 2:
928 return date
923 return date
929 if not formats:
924 if not formats:
930 formats = defaultdateformats
925 formats = defaultdateformats
931 date = date.strip()
926 date = date.strip()
932 try:
927 try:
933 when, offset = map(int, date.split(' '))
928 when, offset = map(int, date.split(' '))
934 except ValueError:
929 except ValueError:
935 # fill out defaults
930 # fill out defaults
936 now = makedate()
931 now = makedate()
937 defaults = {}
932 defaults = {}
938 nowmap = {}
933 nowmap = {}
939 for part in ("d", "mb", "yY", "HI", "M", "S"):
934 for part in ("d", "mb", "yY", "HI", "M", "S"):
940 # this piece is for rounding the specific end of unknowns
935 # this piece is for rounding the specific end of unknowns
941 b = bias.get(part)
936 b = bias.get(part)
942 if b is None:
937 if b is None:
943 if part[0] in "HMS":
938 if part[0] in "HMS":
944 b = "00"
939 b = "00"
945 else:
940 else:
946 b = "0"
941 b = "0"
947
942
948 # this piece is for matching the generic end to today's date
943 # this piece is for matching the generic end to today's date
949 n = datestr(now, "%" + part[0])
944 n = datestr(now, "%" + part[0])
950
945
951 defaults[part] = (b, n)
946 defaults[part] = (b, n)
952
947
953 for format in formats:
948 for format in formats:
954 try:
949 try:
955 when, offset = strdate(date, format, defaults)
950 when, offset = strdate(date, format, defaults)
956 except (ValueError, OverflowError):
951 except (ValueError, OverflowError):
957 pass
952 pass
958 else:
953 else:
959 break
954 break
960 else:
955 else:
961 raise Abort(_('invalid date: %r') % date)
956 raise Abort(_('invalid date: %r') % date)
962 # validate explicit (probably user-specified) date and
957 # validate explicit (probably user-specified) date and
963 # time zone offset. values must fit in signed 32 bits for
958 # time zone offset. values must fit in signed 32 bits for
964 # current 32-bit linux runtimes. timezones go from UTC-12
959 # current 32-bit linux runtimes. timezones go from UTC-12
965 # to UTC+14
960 # to UTC+14
966 if abs(when) > 0x7fffffff:
961 if abs(when) > 0x7fffffff:
967 raise Abort(_('date exceeds 32 bits: %d') % when)
962 raise Abort(_('date exceeds 32 bits: %d') % when)
968 if when < 0:
963 if when < 0:
969 raise Abort(_('negative date value: %d') % when)
964 raise Abort(_('negative date value: %d') % when)
970 if offset < -50400 or offset > 43200:
965 if offset < -50400 or offset > 43200:
971 raise Abort(_('impossible time zone offset: %d') % offset)
966 raise Abort(_('impossible time zone offset: %d') % offset)
972 return when, offset
967 return when, offset
973
968
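# Sketch of parsedate in use: a "unixtime offset" string is taken as-is,
# anything else is tried against defaultdateformats. The values below are
# illustrative only; the second result depends on the local timezone.
parsedate('1300000000 0')          # -> (1300000000, 0)
parsedate('2011-03-13 07:06:40')   # parsed via defaultdateformats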
974 def matchdate(date):
969 def matchdate(date):
975 """Return a function that matches a given date match specifier
970 """Return a function that matches a given date match specifier
976
971
977 Formats include:
972 Formats include:
978
973
979 '{date}' match a given date to the accuracy provided
974 '{date}' match a given date to the accuracy provided
980
975
981 '<{date}' on or before a given date
976 '<{date}' on or before a given date
982
977
983 '>{date}' on or after a given date
978 '>{date}' on or after a given date
984
979
985 >>> p1 = parsedate("10:29:59")
980 >>> p1 = parsedate("10:29:59")
986 >>> p2 = parsedate("10:30:00")
981 >>> p2 = parsedate("10:30:00")
987 >>> p3 = parsedate("10:30:59")
982 >>> p3 = parsedate("10:30:59")
988 >>> p4 = parsedate("10:31:00")
983 >>> p4 = parsedate("10:31:00")
989 >>> p5 = parsedate("Sep 15 10:30:00 1999")
984 >>> p5 = parsedate("Sep 15 10:30:00 1999")
990 >>> f = matchdate("10:30")
985 >>> f = matchdate("10:30")
991 >>> f(p1[0])
986 >>> f(p1[0])
992 False
987 False
993 >>> f(p2[0])
988 >>> f(p2[0])
994 True
989 True
995 >>> f(p3[0])
990 >>> f(p3[0])
996 True
991 True
997 >>> f(p4[0])
992 >>> f(p4[0])
998 False
993 False
999 >>> f(p5[0])
994 >>> f(p5[0])
1000 False
995 False
1001 """
996 """
1002
997
1003 def lower(date):
998 def lower(date):
1004 d = dict(mb="1", d="1")
999 d = dict(mb="1", d="1")
1005 return parsedate(date, extendeddateformats, d)[0]
1000 return parsedate(date, extendeddateformats, d)[0]
1006
1001
1007 def upper(date):
1002 def upper(date):
1008 d = dict(mb="12", HI="23", M="59", S="59")
1003 d = dict(mb="12", HI="23", M="59", S="59")
1009 for days in ("31", "30", "29"):
1004 for days in ("31", "30", "29"):
1010 try:
1005 try:
1011 d["d"] = days
1006 d["d"] = days
1012 return parsedate(date, extendeddateformats, d)[0]
1007 return parsedate(date, extendeddateformats, d)[0]
1013 except:
1008 except:
1014 pass
1009 pass
1015 d["d"] = "28"
1010 d["d"] = "28"
1016 return parsedate(date, extendeddateformats, d)[0]
1011 return parsedate(date, extendeddateformats, d)[0]
1017
1012
1018 date = date.strip()
1013 date = date.strip()
1019
1014
1020 if not date:
1015 if not date:
1021 raise Abort(_("dates cannot consist entirely of whitespace"))
1016 raise Abort(_("dates cannot consist entirely of whitespace"))
1022 elif date[0] == "<":
1017 elif date[0] == "<":
1023 if not date[1:]:
1018 if not date[1:]:
1024 raise Abort(_("invalid day spec, use '<DATE'"))
1019 raise Abort(_("invalid day spec, use '<DATE'"))
1025 when = upper(date[1:])
1020 when = upper(date[1:])
1026 return lambda x: x <= when
1021 return lambda x: x <= when
1027 elif date[0] == ">":
1022 elif date[0] == ">":
1028 if not date[1:]:
1023 if not date[1:]:
1029 raise Abort(_("invalid day spec, use '>DATE'"))
1024 raise Abort(_("invalid day spec, use '>DATE'"))
1030 when = lower(date[1:])
1025 when = lower(date[1:])
1031 return lambda x: x >= when
1026 return lambda x: x >= when
1032 elif date[0] == "-":
1027 elif date[0] == "-":
1033 try:
1028 try:
1034 days = int(date[1:])
1029 days = int(date[1:])
1035 except ValueError:
1030 except ValueError:
1036 raise Abort(_("invalid day spec: %s") % date[1:])
1031 raise Abort(_("invalid day spec: %s") % date[1:])
1037 if days < 0:
1032 if days < 0:
1038 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1033 raise Abort(_("%s must be nonnegative (see 'hg help dates')")
1039 % date[1:])
1034 % date[1:])
1040 when = makedate()[0] - days * 3600 * 24
1035 when = makedate()[0] - days * 3600 * 24
1041 return lambda x: x >= when
1036 return lambda x: x >= when
1042 elif " to " in date:
1037 elif " to " in date:
1043 a, b = date.split(" to ")
1038 a, b = date.split(" to ")
1044 start, stop = lower(a), upper(b)
1039 start, stop = lower(a), upper(b)
1045 return lambda x: x >= start and x <= stop
1040 return lambda x: x >= start and x <= stop
1046 else:
1041 else:
1047 start, stop = lower(date), upper(date)
1042 start, stop = lower(date), upper(date)
1048 return lambda x: x >= start and x <= stop
1043 return lambda x: x >= start and x <= stop

def shortuser(user):
    """Return a short representation of a user name or email address."""
    f = user.find('@')
    if f >= 0:
        user = user[:f]
    f = user.find('<')
    if f >= 0:
        user = user[f + 1:]
    f = user.find(' ')
    if f >= 0:
        user = user[:f]
    f = user.find('.')
    if f >= 0:
        user = user[:f]
    return user
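# Editor's note, illustrative only:
#   shortuser('John Doe <john.doe@example.com>')  ->  'john'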
1065
1060
1066 def email(author):
1061 def email(author):
1067 '''get email of author.'''
1062 '''get email of author.'''
1068 r = author.find('>')
1063 r = author.find('>')
1069 if r == -1:
1064 if r == -1:
1070 r = None
1065 r = None
1071 return author[author.find('<') + 1:r]
1066 return author[author.find('<') + 1:r]
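# Editor's note, illustrative only:
#   email('John Doe <john.doe@example.com>')  ->  'john.doe@example.com'
#   email('john.doe@example.com')             ->  'john.doe@example.com'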

def _ellipsis(text, maxlength):
    if len(text) <= maxlength:
        return text, False
    else:
        return "%s..." % (text[:maxlength - 3]), True

def ellipsis(text, maxlength=400):
    """Trim string to at most maxlength (default: 400) characters."""
    try:
        # use unicode not to split at intermediate multi-byte sequence
        utext, truncated = _ellipsis(text.decode(encoding.encoding),
                                     maxlength)
        if not truncated:
            return text
        return utext.encode(encoding.encoding)
    except (UnicodeDecodeError, UnicodeEncodeError):
        return _ellipsis(text, maxlength)[0]
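# Editor's note, illustrative only (assumes an ASCII-compatible encoding):
#   ellipsis('abcdef', 5)  ->  'ab...'
#   ellipsis('abcde', 5)   ->  'abcde'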

def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
    '''yield every hg repository under path, recursively.'''
    def errhandler(err):
        if err.filename == path:
            raise err
    if followsym and hasattr(os.path, 'samestat'):
        def _add_dir_if_not_there(dirlst, dirname):
            match = False
            samestat = os.path.samestat
            dirstat = os.stat(dirname)
            for lstdirstat in dirlst:
                if samestat(dirstat, lstdirstat):
                    match = True
                    break
            if not match:
                dirlst.append(dirstat)
            return not match
    else:
        followsym = False

    if (seen_dirs is None) and followsym:
        seen_dirs = []
        _add_dir_if_not_there(seen_dirs, path)
    for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler):
        dirs.sort()
        if '.hg' in dirs:
            yield root # found a repository
            qroot = os.path.join(root, '.hg', 'patches')
            if os.path.isdir(os.path.join(qroot, '.hg')):
                yield qroot # we have a patch queue repo here
            if recurse:
                # avoid recursing inside the .hg directory
                dirs.remove('.hg')
            else:
                dirs[:] = [] # don't descend further
        elif followsym:
            newdirs = []
            for d in dirs:
                fname = os.path.join(root, d)
                if _add_dir_if_not_there(seen_dirs, fname):
                    if os.path.islink(fname):
                        for hgname in walkrepos(fname, True, seen_dirs):
                            yield hgname
                    else:
                        newdirs.append(d)
            dirs[:] = newdirs
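# Editor's note: a hedged usage sketch; the path below is hypothetical.
#   for repo in walkrepos('/srv/hg', followsym=True, recurse=True):
#       print repo        # every directory containing a .hg subdirectory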

_rcpath = None

def os_rcpath():
    '''return default os-specific hgrc search path'''
    path = system_rcpath()
    path.extend(user_rcpath())
    path = [os.path.normpath(f) for f in path]
    return path

def rcpath():
    '''return hgrc search path. if env var HGRCPATH is set, use it.
    for each item in path, if directory, use files ending in .rc,
    else use item.
    make HGRCPATH empty to only look in .hg/hgrc of current repo.
    if no HGRCPATH, use default os-specific path.'''
    global _rcpath
    if _rcpath is None:
        if 'HGRCPATH' in os.environ:
            _rcpath = []
            for p in os.environ['HGRCPATH'].split(os.pathsep):
                if not p:
                    continue
                p = expandpath(p)
                if os.path.isdir(p):
                    for f, kind in osutil.listdir(p):
                        if f.endswith('.rc'):
                            _rcpath.append(os.path.join(p, f))
                else:
                    _rcpath.append(p)
        else:
            _rcpath = os_rcpath()
    return _rcpath
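# Editor's note, illustrative only; the paths are hypothetical examples of
# the HGRCPATH behavior described in the docstring above:
#   HGRCPATH=/etc/hgrc.d:$HOME/.hgrc  ->  every *.rc file under /etc/hgrc.d,
#                                         followed by $HOME/.hgrc
#   HGRCPATH=                         ->  only the repository's .hg/hgrc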

def bytecount(nbytes):
    '''return byte count formatted as readable string, with units'''

    units = (
        (100, 1 << 30, _('%.0f GB')),
        (10, 1 << 30, _('%.1f GB')),
        (1, 1 << 30, _('%.2f GB')),
        (100, 1 << 20, _('%.0f MB')),
        (10, 1 << 20, _('%.1f MB')),
        (1, 1 << 20, _('%.2f MB')),
        (100, 1 << 10, _('%.0f KB')),
        (10, 1 << 10, _('%.1f KB')),
        (1, 1 << 10, _('%.2f KB')),
        (1, 1, _('%.0f bytes')),
        )

    for multiplier, divisor, format in units:
        if nbytes >= divisor * multiplier:
            return format % (nbytes / float(divisor))
    return units[-1][2] % nbytes
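# Editor's note, illustrative only:
#   bytecount(0)         ->  '0 bytes'
#   bytecount(12345678)  ->  '11.8 MB'
#   bytecount(5 << 30)   ->  '5.00 GB'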

def uirepr(s):
    # Avoid double backslash in Windows path repr()
    return repr(s).replace('\\\\', '\\')

# delay import of textwrap
def MBTextWrapper(**kwargs):
    class tw(textwrap.TextWrapper):
        """
        Extend TextWrapper for double-width characters.

        Some Asian characters use two terminal columns instead of one.
        A good example of this behavior can be seen with u'\u65e5\u672c',
        the two Japanese characters for "Japan":
        len() returns 2, but when printed to a terminal, they eat 4 columns.

        (Note that this has nothing to do whatsoever with unicode
        representation, or encoding of the underlying string)
        """
        def __init__(self, **kwargs):
            textwrap.TextWrapper.__init__(self, **kwargs)

        def _cutdown(self, str, space_left):
            l = 0
            ucstr = unicode(str, encoding.encoding)
            colwidth = unicodedata.east_asian_width
            for i in xrange(len(ucstr)):
                l += colwidth(ucstr[i]) in 'WFA' and 2 or 1
                if space_left < l:
                    return (ucstr[:i].encode(encoding.encoding),
                            ucstr[i:].encode(encoding.encoding))
            return str, ''

        # overriding of base class
        def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width):
            space_left = max(width - cur_len, 1)

            if self.break_long_words:
                cut, res = self._cutdown(reversed_chunks[-1], space_left)
                cur_line.append(cut)
                reversed_chunks[-1] = res
            elif not cur_line:
                cur_line.append(reversed_chunks.pop())

    global MBTextWrapper
    MBTextWrapper = tw
    return tw(**kwargs)

def wrap(line, width, initindent='', hangindent=''):
    maxindent = max(len(hangindent), len(initindent))
    if width <= maxindent:
        # adjust for weird terminal size
        width = max(78, maxindent + 1)
    wrapper = MBTextWrapper(width=width,
                            initial_indent=initindent,
                            subsequent_indent=hangindent)
    return wrapper.fill(line)
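# Editor's note, illustrative only:
#   wrap('a fairly long help string that needs to fit a narrow terminal',
#        width=30, initindent='  ', hangindent='      ')
#   reflows the text to at most 30 columns, indenting the first line by two
#   spaces and continuation lines by six.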

def iterlines(iterator):
    for chunk in iterator:
        for line in chunk.splitlines():
            yield line

def expandpath(path):
    return os.path.expanduser(os.path.expandvars(path))

def hgcmd():
    """Return the command used to execute current hg

    This is different from hgexecutable() because on Windows we want
    to avoid things opening new shell windows like batch files, so we
    get either the python call or current executable.
    """
    if main_is_frozen():
        return [sys.executable]
    return gethgcmd()

def rundetached(args, condfn):
    """Execute the argument list in a detached process.

    condfn is a callable which is called repeatedly and should return
    True once the child process is known to have started successfully.
    At this point, the child process PID is returned. If the child
    process fails to start or finishes before condfn() evaluates to
    True, return -1.
    """
    # Windows case is easier because the child process is either
    # successfully starting and validating the condition or exiting
    # on failure. We just poll on its PID. On Unix, if the child
    # process fails to start, it will be left in a zombie state until
    # the parent waits on it, which we cannot do since we expect a long
    # running process on success. Instead we listen for SIGCHLD telling
    # us our child process terminated.
    terminated = set()
    def handler(signum, frame):
        terminated.add(os.wait())
    prevhandler = None
    if hasattr(signal, 'SIGCHLD'):
        prevhandler = signal.signal(signal.SIGCHLD, handler)
    try:
        pid = spawndetached(args)
        while not condfn():
            if ((pid in terminated or not testpid(pid))
                and not condfn()):
                return -1
            time.sleep(0.1)
        return pid
    finally:
        if prevhandler is not None:
            signal.signal(signal.SIGCHLD, prevhandler)
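# Editor's note: a hedged call-pattern sketch; the command and the readiness
# file below are hypothetical and not part of the original file.
#   pid = rundetached(['mydaemon', '--port', '8000'],
#                     lambda: os.path.exists('/tmp/mydaemon.ready'))
#   if pid < 0:
#       raise Abort(_('child process failed to start'))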

# use the builtin any()/all() where available (Python >= 2.5), otherwise
# fall back to equivalent pure-Python definitions
try:
    any, all = any, all
except NameError:
    def any(iterable):
        for i in iterable:
            if i:
                return True
        return False

    def all(iterable):
        for i in iterable:
            if not i:
                return False
        return True

def interpolate(prefix, mapping, s, fn=None, escape_prefix=False):
    """Return the result of interpolating items in the mapping into string s.

    prefix is a single character string, or a two character string with
    a backslash as the first character if the prefix needs to be escaped in
    a regular expression.

    fn is an optional function that will be applied to the replacement text
    just before replacement.

    escape_prefix is an optional flag that allows using doubled prefix for
    its escaping.
    """
    fn = fn or (lambda s: s)
    patterns = '|'.join(mapping.keys())
    if escape_prefix:
        patterns += '|' + prefix
        if len(prefix) > 1:
            prefix_char = prefix[1:]
        else:
            prefix_char = prefix
        mapping[prefix_char] = prefix_char
    r = re.compile(r'%s(%s)' % (prefix, patterns))
    return r.sub(lambda x: fn(mapping[x.group()[1:]]), s)
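# Editor's note, illustrative only:
#   interpolate('%', {'user': 'alice'}, 'hello %user')
#       ->  'hello alice'
#   interpolate(r'\$', {'rev': '42'}, 'checkout $rev, cost $$5',
#               escape_prefix=True)
#       ->  'checkout 42, cost $5'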

def getport(port):
    """Return the port for a given network service.

    If port is an integer, it's returned as is. If it's a string, it's
    looked up using socket.getservbyname(). If there's no matching
    service, util.Abort is raised.
    """
    try:
        return int(port)
    except ValueError:
        pass

    try:
        return socket.getservbyname(port)
    except socket.error:
        raise Abort(_("no port number associated with service '%s'") % port)
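# Editor's note, illustrative only:
#   getport(8000)    ->  8000
#   getport('8000')  ->  8000
#   getport('http')  ->  80 on most systems (via the local services database)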

_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True,
             '0': False, 'no': False, 'false': False, 'off': False,
             'never': False}

def parsebool(s):
    """Parse s into a boolean.

    If s is not a valid boolean, returns None.
    """
    return _booleans.get(s.lower(), None)
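# Editor's note, illustrative only:
#   parsebool('Yes')    ->  True
#   parsebool('off')    ->  False
#   parsebool('maybe')  ->  None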